def get_element_data_from_sources(include_points, time_binding, use_txn_raw, result_id, el, statistics_list):
    """Fetch the data series for one element from the REST API.

    For a TRANSACTION element with raw mode enabled, pulls the raw JSON
    samples; otherwise pulls the aggregated points for *statistics_list*.
    When *time_binding* is set, both series are filtered down to that window.

    Args:
        include_points: fetch data even when no time window is requested.
        time_binding: dict with window bounds, or None for the full run.
        use_txn_raw: allow raw data for TRANSACTION elements.
        result_id: test-result id used to build the endpoint.
        el: element dict; 'id' and 'type' keys are read.
        statistics_list: statistic names for the points query.

    Returns:
        (json_raws, json_points) — one of the two is normally empty.
    """
    # Idiom fixes vs. original: `el['type'] in ["TRANSACTION"]` -> `==`,
    # `not time_binding is None` -> `is not None`. Behavior unchanged.
    viable_raw_element = use_txn_raw and el['type'] == "TRANSACTION"
    json_raws = []
    json_points = []
    if include_points or time_binding is not None:
        if viable_raw_element:
            logging.getLogger().debug(
                "Starting get_element_data_from_sources for element ({})".
                format(el['id']))
            json_raws = rest_crud.get(
                get_end_point(result_id, __operation_elements) + "/" +
                el['id'] + "/raw?format=JSON")
        else:
            json_points = rest_crud.get(
                get_end_point(result_id, __operation_elements) + "/" +
                el['id'] + "/points?statistics=" + ",".join(statistics_list))
    if time_binding is not None:
        # Points carry millisecond 'from'/'to' bounds; raw rows carry a single
        # millisecond 'Elapsed' stamp — both are converted to seconds here.
        json_points = filter_by_time(json_points, time_binding,
                                     lambda p: int(p['from']) / 1000,
                                     lambda p: int(p['to']) / 1000)
        json_raws = filter_by_time(json_raws, time_binding,
                                   lambda p: p['Elapsed'] / 1000,
                                   lambda p: p['Elapsed'] / 1000)
    return (json_raws, json_points)
def should_raw_transactions_data(__id, time_filter):
    """Return True when raw transaction data exists and should be used.

    Only meaningful when a time filter is active and the backend supports
    raw data. Probes the transaction with the fewest iterations first so the
    cheapest possible raw query decides the answer.
    """
    if time_filter is not None and can_raw_transactions_data():
        # Collect every TRANSACTION element that has at least one iteration.
        elements_url = get_end_point(__id, __operation_elements)
        candidates = []
        for element in rest_crud.get(elements_url + "?" + QUERY_CATEGORY_TRANSACTION):
            if element['type'] != 'TRANSACTION':
                continue
            values = rest_crud.get(elements_url + "/" + element['id'] + "/values")
            if values['count'] > 0:
                candidates.append({'id': element['id'], 'count': values['count']})
        # Smallest iteration count first: least data to download per probe.
        candidates.sort(key=lambda entry: entry['count'])
        for entry in candidates:
            raw_rows = rest_crud.get(elements_url + "/" + entry['id'] + "/raw?format=JSON")
            if len(raw_rows) > 0:
                logging.debug("use_raw[1]: True")
                return True
    logging.debug("use_raw[1]: False")
    return False
def junit(__id, junit_file):
    """Fetch result plus SLA details and write them as a JUnit report."""
    result = rest_crud.get(get_end_point(__id))
    sla_per_test = rest_crud.get(get_end_point(__id, __operation_sla_test))
    sla_per_interval = rest_crud.get(get_end_point(__id, __operation_sla_interval))
    displayer.print_result_junit(result, sla_per_test, sla_per_interval, junit_file)
def summary(__id):
    """Print the full result summary (SLAs + statistics) and exit accordingly.

    Returns whatever exit_process derives from the SLA outcomes.
    """
    base = get_end_point(__id)
    result = rest_crud.get(base)
    sla_global = rest_crud.get(get_end_point(__id, __operation_sla_global))
    sla_test = rest_crud.get(get_end_point(__id, __operation_sla_test))
    sla_interval = rest_crud.get(get_end_point(__id, __operation_sla_interval))
    stats = rest_crud.get(get_end_point(__id, __operation_statistics))
    displayer.print_result_summary(result, sla_global, sla_test, sla_interval, stats)
    return exit_process(result, sla_global, sla_test, sla_interval)
def fill_single_slas(__id, components, data):
    """Populate the SLA sections of *data* when the 'slas' component is on.

    Global and per-test SLAs are only available once the test has TERMINATED;
    until then they are filled with empty lists. Per-interval SLAs are always
    fetched.
    """
    if not components['slas']:
        return
    terminated = data['summary']['status'] == 'TERMINATED'
    gprint("Getting global SLAs...")
    data['sla_global'] = rest_crud.get(get_end_point(__id, __operation_sla_global)) if terminated else []
    gprint("Getting per-test SLAs...")
    data['sla_test'] = rest_crud.get(get_end_point(__id, __operation_sla_test)) if terminated else []
    gprint("Getting per-interval SLAs...")
    data['sla_interval'] = rest_crud.get(get_end_point(__id, __operation_sla_interval))
def fill_trend_result(result, all_transactions, elements_filter, time_filter):
    """Enrich one test-result dict with transaction element data for trends.

    Fetches statistics and TRANSACTION elements for the result, optionally
    filters elements, aggregates per-element data, and appends the surviving
    transactions to *all_transactions* (mutated in place).

    Args:
        result: test-result summary dict; 'name', 'id', 'terminationReason' are read.
        all_transactions: accumulator list extended with this result's transactions.
        elements_filter: optional element filter expression, or None for all.
        time_filter: optional time-window expression, or None for the full run.

    Returns:
        The same *result* dict with 'elements' and 'statistics' keys added.
    """
    gprint("fill_trend_result: Getting test '" + result["name"] + "' (" + result["id"] + ") statistics...")
    __id = result["id"]
    # NOTE: `result` is re-bound to the summary-augmented copy from here on.
    result = add_test_result_summary_fields(result)
    json_stats = rest_crud.get(get_end_point(__id, __operation_statistics))
    statistics_list = get_standard_statistics_list()
    time_binding = None
    if time_filter is not None:
        time_binding = {"time_filter": time_filter}
    if time_binding is not None:
        # fill_time_binding needs the (augmented) summary to resolve the window.
        time_binding["summary"] = result
        time_binding = fill_time_binding(time_binding)
    found_elements = []
    elements = []
    # elements.extend(rest_crud.get(get_end_point(__id, __operation_elements) + "?category=REQUEST"))
    transactions = rest_crud.get(
        get_end_point(__id, __operation_elements) + "?" + QUERY_CATEGORY_TRANSACTION)
    elements.extend(transactions)
    # Skip filtering for tests that never started — they have no elements to match.
    if elements_filter is not None and not (result['terminationReason'] in ['FAILED_TO_START']):
        # `filters` is parsed only for the debug log; filter_elements re-parses
        # the raw expression itself.
        filters = parse_elements_filter(elements_filter)
        logging.debug("Using filters: {}".format(filters))
        logging.debug("Filtering elements: {}".format(len(elements)))
        found_elements = filter_elements(elements, elements_filter)
        logging.debug("Filtered elements: {}".format(len(found_elements)))
    else:
        found_elements = elements
    if len(elements) > 0:
        use_txn_raw = should_raw_transactions_data(__id, time_filter)
        found_elements = get_elements_data(__id, found_elements, time_binding,
                                           True, statistics_list, use_txn_raw)
        # Slowest transactions first.
        found_elements = sorted(found_elements,
                                key=lambda x: x["aggregate"]["avgDuration"],
                                reverse=True)
    all_transactions.extend(
        list(filter(lambda el: el["type"] == "TRANSACTION", found_elements)))
    result["elements"] = found_elements
    result["statistics"] = json_stats
    return result
def fill_single_monitors(__id, components, data):
    """Populate data['monitors'] when any monitor-related component is on.

    Monitors are fetched with their point data included and sorted by
    display name for stable output.
    """
    wanted = (components['monitors'] or components['controller_points']
              or components['ext_data'])
    if not wanted:
        return
    gprint("Getting monitors...")
    monitors = rest_crud.get(get_end_point(__id, __operation_monitors))
    monitors = get_mon_datas(__id, lambda m: True, monitors, True)
    data['monitors'] = sorted(monitors, key=lambda m: m['display_name'])
def fill_single_requests(__id, elements_filter, time_binding, statistics_list, use_txn_raw, components, data):
    """Populate data['all_requests'] with the aggregated 'all-requests' element.

    The synthetic 'all-requests' element is always kept even when an
    elements filter would exclude it, so the overall summary stays available.

    Args:
        __id: test-result id.
        elements_filter: optional filter expression, or None.
        time_binding: optional time window dict, or None.
        statistics_list: statistic names to request per element.
        use_txn_raw: whether raw transaction data may be used.
        components: dict of enabled output components; 'all_requests' is read.
        data: output dict mutated in place.
    """
    if components['all_requests']:
        gprint("Getting all-request data...")
        json_elements_requests = rest_crud.get(
            get_end_point(__id, __operation_elements) + "?category=REQUEST")
        json_elements_all_requests = list(
            filter(lambda m: m['id'] == 'all-requests', json_elements_requests))
        json_elements_all_requests_preserve = json_elements_all_requests
        if elements_filter is not None:
            json_elements_all_requests = filter_elements(
                json_elements_all_requests, elements_filter)
            # Re-add the synthetic element if the filter dropped it.
            if not any(
                    filter(lambda m: m['id'] == 'all-requests',
                           json_elements_all_requests)):
                json_elements_all_requests = json_elements_all_requests + json_elements_all_requests_preserve
        json_elements_all_requests = get_elements_data(
            __id, json_elements_all_requests, time_binding, True,
            statistics_list, use_txn_raw)
        # BUG FIX: the original statement ended with a stray trailing comma,
        # so data['all_requests'] was assigned a 1-tuple wrapping the value
        # instead of the value itself.
        data['all_requests'] = json_elements_all_requests[0] if (
            json_elements_all_requests is not None
            and len(json_elements_all_requests) > 0) else {}
def fill_single_transactions(__id, elements_filter, time_binding, statistics_list, use_txn_raw, components, data):
    """Populate data['elements']['transactions'] when that component is on.

    Elements lacking a 'display_name' after aggregation are logged as an
    error and excluded from the sorted output.
    """
    if not components['transactions']:
        return
    gprint("Getting transactions...")
    transactions = rest_crud.get(
        get_end_point(__id, __operation_elements) + "?" + QUERY_CATEGORY_TRANSACTION)
    if elements_filter is not None:
        transactions = filter_elements(transactions, elements_filter)
    transactions = get_elements_data(__id, transactions, time_binding, True,
                                     statistics_list, use_txn_raw)
    nameless = [txn for txn in transactions if 'display_name' not in txn]
    if len(nameless) > 0:
        logging.error("{} elements had no 'display_name': {}".format(
            len(nameless), nameless))
    named = [txn for txn in transactions if 'display_name' in txn]
    data['elements']['transactions'] = sorted(named, key=lambda txn: txn['display_name'])
def get_element_data(el, result_id, time_binding, include_points, statistics_list, use_txn_raw):
    """Aggregate values, points and raw data onto a single element dict.

    Fetches the element's aggregate values, then (via
    get_element_data_from_sources) raw samples or points, recomputes
    aggregates for a partial time window when needed, normalizes duration
    fields to seconds, and attaches everything back onto *el* (mutated and
    returned).

    Args:
        el: element dict; 'id' is read, many display/aggregate keys are written.
        result_id: test-result id used to build endpoints.
        time_binding: optional window dict with 'is_full_test_duration',
            'from_secs', 'to_secs'; None means the full run.
        include_points: whether points/raw series should be fetched at all.
        statistics_list: statistic names for the points query.
        use_txn_raw: whether raw transaction data may be used.

    Returns:
        The same *el* dict, enriched.
    """
    full_name = get_element_full_name(el)
    parent = get_element_parent(el)
    user_path = get_element_user_path(el)
    gprint("Getting element values for '" + full_name + "'")
    json_values = rest_crud.get(
        get_end_point(result_id, __operation_elements) + "/" + el['id'] + "/values")
    is_full_test_duration = time_binding is None or time_binding['is_full_test_duration']
    (json_raws, json_points) = get_element_data_from_sources(
        include_points, time_binding, use_txn_raw, result_id, el, statistics_list)
    if not is_full_test_duration:
        # Partial window: the server-side aggregates cover the whole run, so
        # recompute them from whichever series (raw preferred) is available.
        time_binding_duration = time_binding['to_secs'] - time_binding['from_secs']
        if len(json_raws) > 0:
            (perc_points, sum_of_count, sum_of_errors) = get_element_data_by_raws(json_raws, json_values)
        else:
            (perc_points, sum_of_count, sum_of_errors) = get_element_data_by_points(json_points, json_values)
        # from either data source, calculate common aggregates
        fill_element_data_common_values(json_values, perc_points, sum_of_count,
                                        sum_of_errors, time_binding_duration)
    # Example raw entry:
    # {
    #     "Elapsed": 38736,
    #     "Time": "2020-10-05T20:58:36.487Z",
    #     "User Path": "Post",
    #     "Virtual User ID": "0-1",
    #     "Parent": "Actions",
    #     "Element": "Click Submit",
    #     "Response time": 947,
    #     "Success": "yes",
    #     "Population": "popPost",
    #     "Zone": "Default zone"
    # }
    # Duration-like fields (including every percentile*) come back in ms;
    # convert to seconds, then round all rate/duration fields to 3 places.
    perc_fields = list(filter(lambda x: x.startswith('percentile'), json_values.keys()))
    convert_to_seconds = ['minDuration', 'maxDuration', 'sumDuration', 'avgDuration'] \
        + perc_fields
    convert_element_fields_to_seconds(convert_to_seconds, json_values)
    round_fields = convert_to_seconds + ['elementPerSecond', 'successRate',
                                         'successPerSecond', 'failureRate',
                                         'failurePerSecond']
    round_element_fields(round_fields, json_values, 3)
    el["display_name"] = full_name
    el["parent"] = parent
    el["user_path"] = user_path
    # True when server-side aggregates were kept as-is for a partial window.
    el["aggregate_already_aggregated_data"] = not use_txn_raw and not is_full_test_duration
    el["aggregate"] = json_values
    el["points"] = json_points
    el["raw"] = json_raws
    el["totalCount"] = el["aggregate"]["successCount"] + el["aggregate"]["failureCount"]
    if el["totalCount"] == 0:
        # Avoid division by zero when the element never executed.
        el["successRate"] = 0
        el["failureRate"] = 0
    else:
        el["successRate"] = el["aggregate"]["successCount"] / el["totalCount"]
        el["failureRate"] = el["aggregate"]["failureCount"] / el["totalCount"]
    return el
def get_results_by_result_id(__id, count_back, count_ahead):
    """Collect results around a base result for the same project/scenario.

    Pages backwards through results (newest first from the API, then sorted
    oldest-first) until compile_results_from_source yields the requested
    window of *count_back* results before and *count_ahead* after __id.

    Args:
        __id: the anchor test-result id.
        count_back: how many earlier results to include (negative by convention,
            given total_expected = -count_back + 1 + count_ahead).
        count_ahead: how many later results to include.

    Returns:
        List of result dicts for the window, as built by
        compile_results_from_source.
    """
    result = rest_crud.get(get_end_point(__id))
    project = result["project"]
    scenario = result["scenario"]
    logging.debug({'project': project, 'scenario': scenario})
    total_expected = -count_back + 1 + count_ahead
    results = []
    logging.debug("based_id: {}".format(__id))
    logging.debug("total_expected: {}".format(total_expected))
    logging.debug("count_back: {}".format(count_back))
    logging.debug("count_ahead: {}".format(count_ahead))
    page_size = 200
    params = {
        'limit': page_size,
        'offset': 0,
        'sort': '-startDate',
        'project': project
    }
    # Get first page
    all_entities = []
    # Get all other pages
    while len(results) < total_expected:
        entities = rest_crud.get(get_versioned_endpoint_base(), params)
        ret_count = len(entities)
        # Exit the loop when the pagination is not implemented for the endpoint and the number of entities is equal to page_size
        if ret_count == 0:
            break
        # Keep only results for the anchor's scenario; the API filter covers project only.
        entities = list(filter(lambda el: el['scenario'] == scenario, entities))
        all_entities += entities
        params['offset'] += page_size
        arr_sorted_by_time = list(
            sorted(all_entities, key=lambda x: x["startDate"]))
        # Recompute the window after each page; loop exits once it is filled.
        results = compile_results_from_source(__id, arr_sorted_by_time, count_back, count_ahead)
        if ret_count < page_size:
            # Short page means there are no further results to fetch.
            break
    return results
def get_sla_data_by_name_or_id(name):
    """Resolve a result by name or id and bundle its SLA and statistics data.

    Global and per-test SLAs are only fetched once the test has TERMINATED;
    otherwise they are empty lists.
    """
    __id = get_id_by_name_or_id(name)
    result = rest_crud.get(get_end_point(__id))
    terminated = result['status'] == 'TERMINATED'
    sla_global = rest_crud.get(get_end_point(__id, __operation_sla_global)) if terminated else []
    sla_test = rest_crud.get(get_end_point(__id, __operation_sla_test)) if terminated else []
    sla_interval = rest_crud.get(get_end_point(__id, __operation_sla_interval))
    stats = rest_crud.get(get_end_point(__id, __operation_statistics))
    return {
        'id': __id,
        'result': result,
        'stats': stats,
        'sla_global': sla_global,
        'sla_test': sla_test,
        'sla_interval': sla_interval
    }
def __fill_map(self, name=None):
    """Rebuild the name→id cache from the endpoint.

    Returns the element whose 'name' equals *name* (last match wins),
    or None when no element matches.
    """
    self.__map = {}
    matched = None
    for element in rest_crud.get(self.__endpoint):
        element_name = element['name']
        self.__map[element_name] = element['id']
        if element_name == name:
            matched = element
    return matched
def fill_single_summary(__id, time_binding, time_filter, components, data):
    """Populate data['summary'] and resolve the time binding when needed.

    Fetches the result summary if any of the summary/slas components is on
    or a time filter is active, then completes *time_binding* against that
    summary. Returns the (possibly updated) time binding.
    """
    needed = components['summary'] or components['slas'] or time_filter is not None
    if needed:
        gprint("Getting test results...")
        summary = add_test_result_summary_fields(rest_crud.get(get_end_point(__id)))
        data['summary'] = summary
        if time_binding is not None:
            time_binding["summary"] = summary
            time_binding = fill_time_binding(time_binding)
    return time_binding
def get_named_or_id(name, is_id_, resolver):
    """Fetch an entity by name or id via the resolver's endpoint.

    When *is_id_* is False the name is resolved first; the resolver may
    return either the entity JSON directly (returned as-is) or an id string
    that is then fetched from the REST API.

    Args:
        name: entity name, or id when is_id_ is True.
        is_id_: True when *name* is already an id.
        resolver: object providing get_endpoint() and resolve_name_or_json().

    Returns:
        The entity JSON.
    """
    endpoint = resolver.get_endpoint()
    if not is_id_:
        json_or_id = resolver.resolve_name_or_json(name)
        # Idiom fix: isinstance instead of `type(x) is not str`.
        if not isinstance(json_or_id, str):
            return json_or_id
        name = json_or_id
    return rest_crud.get(endpoint + "/" + name)
def cli(name_or_id, static_dynamic, human):
    """read of NeoLoad Web zones"""
    # NOTE: the docstring above doubles as CLI help text — do not reword it.
    zones = rest_crud.get("/v2/resources/zones")
    matching = [zone for zone in zones
                if filter_result(zone, name_or_id, static_dynamic)]
    if human:
        print_human(matching)
    else:
        tools.print_json(matching)
def display_status(results_id):
    """Poll a test result's status, print changes, and show live statistics.

    Returns True while polling should continue, False once the test has
    TERMINATED.
    """
    global __last_status
    # NOTE(review): this path has no leading '/' unlike other endpoints in
    # this codebase ('/v2/...') — presumably rest_crud normalizes it; verify.
    res = rest_crud.get('v2/test-results/' + results_id)
    # Consistency/robustness fix: use .get() like the endpoint-helper variant
    # of this function, instead of a KeyError-prone subscript.
    status = res.get('status')
    if __last_status != status:
        print("Status: " + status)
        __last_status = status
    if status == "RUNNING":
        display_statistics(results_id, res)
    if status == "TERMINATED":
        return False
    return True
def display_status(results_id):
    """Poll a test result's status, print changes, and show live statistics.

    Returns True while polling should continue, False once the test has
    TERMINATED.
    """
    global __last_status
    result = rest_crud.get(test_results.get_end_point(results_id))
    status = result.get('status')
    if status != __last_status:
        print("Status: " + status)
        __last_status = status
    if status == "RUNNING":
        display_statistics(results_id, result)
    return status != "TERMINATED"
def display_statistics(results_id, json_summary):
    """Print a one-line live statistics summary for a running test.

    Shows elapsed/target duration, error and LG counts, virtual users,
    throughput, request rate and average request duration.
    """
    res = rest_crud.get(__endpoint + results_id + '/statistics')
    # +1ms nudge keeps the elapsed delta strictly positive right at start.
    started = datetime.datetime.fromtimestamp((json_summary['startDate'] + 1) / 1000)
    time_cur_format = format_delta(datetime.datetime.now() - started)
    lg_count = json_summary['lgCount']
    duration_raw = json_summary['duration']
    if duration_raw:
        duration = format_delta(datetime.timedelta(seconds=(duration_raw / 1000)))
    else:
        duration = " - "
    throughput = res['totalGlobalDownloadedBytesPerSecond']
    error_count = res['totalGlobalCountFailure']
    vu_count = res['lastVirtualUserCount']
    request_sec = res['lastRequestCountPerSecond']
    request_duration = res['totalRequestDurationAverage']
    print(
        f' {time_cur_format}/{duration}\t Err[{error_count}], LGs[{lg_count}]\t VUs:{vu_count}\t BPS[{throughput}]\t RPS:{request_sec:.3f}\t avg(rql): {request_duration}'
    )
def get_mon_datas(result_id, l_selector, base_col, include_points):
    """Fetch point data and percentile aggregates for selected monitors.

    Args:
        result_id: test-result id used to build the monitor endpoints.
        l_selector: predicate choosing which monitors in *base_col* to process.
        base_col: iterable of monitor dicts; selected ones are mutated in place.
        include_points: when False, the time-ordered points are dropped and
            only the percentile aggregates are kept.

    Returns:
        The list of selected (and enriched) monitor dicts.
    """
    # Idiom fix: replace the manual filter-and-append loop with list(filter()).
    mons = list(filter(l_selector, base_col))
    for mon in mons:
        full_name = get_element_full_name(mon)
        gprint("Getting monitor values for '" + full_name + "'")
        mon_points = rest_crud.get(
            get_end_point(result_id, __operation_monitors) + "/" + mon['id'] + "/points")
        time_points = list(sorted(mon_points, key=lambda x: x['from']))
        # Percentiles are computed over the sorted AVG values of each point.
        perc_points = list(sorted(map(lambda x: x['AVG'], mon_points)))
        mon["display_name"] = full_name
        mon["percentiles"] = {
            'percentile50': percentile(perc_points, 0.5),
            'percentile90': percentile(perc_points, 0.9),
            'percentile95': percentile(perc_points, 0.95),
            'percentile99': percentile(perc_points, 0.99)
        }
        mon["points"] = time_points if include_points else []
    return mons
def fill_single_transactions(__id, elements_filter, time_binding, statistics_list, use_txn_raw, components, data):
    """Populate data['elements']['transactions'] when that component is on.

    Args:
        __id: test-result id.
        elements_filter: optional filter expression, or None.
        time_binding: optional time window dict, or None.
        statistics_list: statistic names to request per element.
        use_txn_raw: whether raw transaction data may be used.
        components: dict of enabled components; 'transactions' is read.
        data: output dict mutated in place.
    """
    if components['transactions']:
        gprint("Getting transactions...")
        json_elements_transactions = rest_crud.get(
            get_end_point(__id, __operation_elements) + "?" + QUERY_CATEGORY_TRANSACTION)
        if elements_filter is not None:
            json_elements_transactions = filter_elements(
                json_elements_transactions, elements_filter)
        json_elements_transactions = get_elements_data(
            __id, json_elements_transactions, time_binding, True,
            statistics_list, use_txn_raw)
        # BUG FIX: sorting assumed every element carried 'display_name' and
        # raised KeyError otherwise; log and drop offenders instead, matching
        # the sibling variant of this function.
        no_display_name = list(
            filter(lambda x: 'display_name' not in x, json_elements_transactions))
        if len(no_display_name) > 0:
            logging.error("{} elements had no 'display_name': {}".format(
                len(no_display_name), no_display_name))
        json_elements_transactions = list(
            sorted(filter(lambda x: 'display_name' in x, json_elements_transactions),
                   key=lambda x: x['display_name']))
        data['elements']['transactions'] = json_elements_transactions
def get_zones():
    """Return all zones from the REST API."""
    endpoint = get_end_point()
    return rest_crud.get(endpoint)
def fill_single_events(__id, components, data):
    """Populate data['events'] when the events component is on."""
    if not components['events']:
        return
    gprint("Getting events...")
    data['events'] = rest_crud.get(get_end_point(__id, __operation_events))
def fill_single_stats(__id, components, data):
    """Populate data['statistics'] when the statistics component is on."""
    if not components['statistics']:
        return
    gprint("Getting test statistics...")
    data['statistics'] = rest_crud.get(get_end_point(__id, __operation_statistics))
def ls(name, is_id_, resolver):
    """Print one entity (by name or id) or the full listing for the endpoint."""
    endpoint = resolver.get_endpoint()
    if not name:
        print_json(rest_crud.get(endpoint))
        return
    get_id_and_print_json(get_named_or_id(name, is_id_, resolver))
def get_json_summary(__id):
    """Wrap the raw test-result JSON in a {'summary': ...} envelope."""
    result = rest_crud.get(get_end_point(__id))
    return {"summary": result}
def get_front_url_by_private_entrypoint():
    """Return the web front-end root URL from the private url-api action."""
    payload = rest_crud.get(
        '/nlweb/rest/rest-api/url-api/v1/action/get-front-end-url')
    return payload['frontEndUrl']['rootUrl']