
Commit

change min/max to overall_min/overall_max + update comparison results publisher

Signed-off-by: Michael Oviedo <[email protected]>
OVI3D0 committed Nov 12, 2024
1 parent 3ff5f24 commit af6e812
Showing 2 changed files with 17 additions and 10 deletions.
19 changes: 13 additions & 6 deletions osbenchmark/aggregator.py
@@ -202,15 +202,22 @@ def calculate_weighted_average(self, task_metrics: Dict[str, List[Any]], iterati
                         weighted_metrics[metric][item_key] = values[0][item_key]
                     else:
                         item_values = [value.get(item_key, 0) for value in values]
-                        weighted_sum = sum(value * iterations for value in item_values)
-                        total_iterations = iterations * len(item_values)
-                        weighted_avg = weighted_sum / total_iterations
-                        weighted_metrics[metric][item_key] = weighted_avg
+                        if item_key == 'min':
+                            weighted_metrics[metric]['overall_min'] = min(item_values)
+                        elif item_key == 'max':
+                            weighted_metrics[metric]['overall_max'] = max(item_values)
+                        elif item_key == 'median':
+                            weighted_sum = sum(value * iterations for value in item_values)
+                            total_iterations = iterations * len(item_values)
+                            weighted_metrics[metric][item_key] = weighted_sum / total_iterations
+                        else:
+                            weighted_sum = sum(value * iterations for value in item_values)
+                            total_iterations = iterations * len(item_values)
+                            weighted_metrics[metric][item_key] = weighted_sum / total_iterations
             else:
                 weighted_sum = sum(value * iterations for value in values)
                 total_iterations = iterations * len(values)
-                weighted_avg = weighted_sum / total_iterations
-                weighted_metrics[metric] = weighted_avg
+                weighted_metrics[metric] = weighted_sum / total_iterations
 
         return weighted_metrics

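For context, a minimal sketch (not part of the commit; all values are hypothetical) of what the rewritten branches compute: per-iteration 'min'/'max' samples now produce the true extremes across iterations under the new overall_min/overall_max keys, while 'median' and all remaining keys keep the weighted-average treatment.

# Illustration only: hypothetical per-iteration 'min' samples and iteration count.
item_values = [95.0, 87.0, 102.0]
iterations = 10

# New behavior: record the true extreme across iterations.
overall_min = min(item_values)  # 87.0

# Treatment still applied to 'median' and the remaining keys: a weighted
# average, which collapses to the plain mean here because every sample
# carries the same iteration count.
weighted_sum = sum(value * iterations for value in item_values)
total_iterations = iterations * len(item_values)
weighted_avg = weighted_sum / total_iterations  # ~94.67

print(overall_min, round(weighted_avg, 2))  # 87.0 94.67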
8 changes: 4 additions & 4 deletions osbenchmark/results_publisher.py
@@ -464,16 +464,16 @@ def _write_results(self, metrics_table, metrics_table_console):
                                        data_plain=metrics_table, data_rich=metrics_table_console)
 
     def _publish_throughput(self, baseline_stats, contender_stats, task):
-        b_min = baseline_stats.metrics(task)["throughput"]["min"]
+        b_min = baseline_stats.metrics(task)["throughput"].get("overall_min") or baseline_stats.metrics(task)["throughput"]["min"]
         b_mean = baseline_stats.metrics(task)["throughput"]["mean"]
         b_median = baseline_stats.metrics(task)["throughput"]["median"]
-        b_max = baseline_stats.metrics(task)["throughput"]["max"]
+        b_max = baseline_stats.metrics(task)["throughput"].get("overall_max") or baseline_stats.metrics(task)["throughput"]["max"]
         b_unit = baseline_stats.metrics(task)["throughput"]["unit"]
 
-        c_min = contender_stats.metrics(task)["throughput"]["min"]
+        c_min = contender_stats.metrics(task)["throughput"].get("overall_min") or contender_stats.metrics(task)["throughput"]["min"]
         c_mean = contender_stats.metrics(task)["throughput"]["mean"]
         c_median = contender_stats.metrics(task)["throughput"]["median"]
-        c_max = contender_stats.metrics(task)["throughput"]["max"]
+        c_max = contender_stats.metrics(task)["throughput"].get("overall_max") or contender_stats.metrics(task)["throughput"]["max"]
 
         return self._join(
             self._line("Min Throughput", b_min, c_min, task, b_unit, treat_increase_as_improvement=True),
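A minimal sketch (not part of the commit) of the fallback pattern introduced above: the publisher prefers the new overall_min/overall_max keys and falls back to the legacy min/max keys when publishing results from older aggregator output. The dict literals below are hypothetical stand-ins for baseline_stats.metrics(task)["throughput"].

# Hypothetical legacy record: no 'overall_min' key, so .get() returns None
# and the `or` falls through to the old 'min' value.
legacy = {"min": 87.0, "max": 102.0}
print(legacy.get("overall_min") or legacy["min"])  # 87.0

# Hypothetical new-style record: 'overall_min' is present and wins.
new_style = {"overall_min": 85.5, "min": 94.67}
print(new_style.get("overall_min") or new_style["min"])  # 85.5

# Caveat: `or` also falls back when overall_min is 0 or 0.0 (falsy); an
# explicit `is not None` check would preserve a legitimate zero.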
