def parse_res_file(path):
    """Read data from file ends with ".res".

    Args:
        path (str): The position of the res file.

    Returns:
        incremental_metrics (list, json array): The throughput at different
            time. Each entry has "time", "throughput" and "latency" keys.
    """
    with open(path) as csvfile:
        reader = csv.DictReader(csvfile, delimiter=',')
        incremental_metrics = []
        for row in reader:
            # NOTE(review): float(...) raises TypeError if the column is
            # absent (get_value_by_pattern returns the None default) —
            # presumably every .res row carries these columns; verify.
            metrics_instance = {
                "time": float(get_value_by_pattern(row, 'time', None)),
                "throughput": float(get_value_by_pattern(row, 'throughput', None)),
            }
            # Reuse the shared latency parser instead of duplicating the
            # LATENCY_ATTRIBUTE_MAPPING loop inline; the row dict is looked
            # up with the exact same patterns and formatting either way.
            metrics_instance['latency'] = parse_latency_data(row)
            incremental_metrics.append(metrics_instance)
    return incremental_metrics
def parse_latency_data(latency_dict):
    """Convert OLTPBench latency data into the API-request format.

    Args:
        latency_dict (dict): The "Latency Distribution" json object in the
            OLTPBench summary file.

    Returns:
        latency (dict): The latency dict required to send to the performance
            storage service.
    """
    result = {}
    for attribute, pattern in LATENCY_ATTRIBUTE_MAPPING:
        raw = get_value_by_pattern(latency_dict, pattern, None)
        if raw:
            # Truthy values are rounded to 4 significant figures.
            result[attribute] = float("{:.4}".format(raw))
        else:
            # Falsy values (None, 0, '') pass through untouched.
            result[attribute] = raw
    return result
def parse_metrics(summary):
    """Collect the OLTPBench results metrics from the summary file data.

    Args:
        summary (dict): Parsed OLTPBench summary file data.

    Returns:
        dict: Metrics with "throughput" (defaulting to '-1.0' when absent)
            and "latency" (parsed from the "Latency Distribution" object).
    """
    throughput = get_value_by_pattern(summary, 'throughput', '-1.0')
    latency_distribution = summary.get('Latency Distribution', {})
    return {
        'throughput': throughput,
        'latency': parse_latency_data(latency_distribution),
    }
def parse_timestamp(summary):
    """Get the timestamp in milliseconds from the summary file data.

    Args:
        summary (dict): Parsed OLTPBench summary file data.

    Returns:
        int: The summary's timestamp, or the current time in milliseconds
            when the summary has no timestamp field.
    """
    # Bug fix: the previous fallback was str(time()), e.g. "1699999999.12",
    # which int() cannot parse (ValueError on the decimal point) and which
    # is in seconds rather than the documented milliseconds.
    default = str(int(time() * 1000))
    return int(get_value_by_pattern(summary, 'timestamp', default))