Merge remote-tracking branch 'origin/main' into neural_search
vpehkone committed Apr 5, 2024
2 parents e6e3910 + bd79dc0 commit 1930e18
Showing 3 changed files with 16 additions and 3 deletions.
6 changes: 6 additions & 0 deletions osbenchmark/benchmark.py
@@ -197,6 +197,11 @@ def add_workload_source(subparser):
"--contender",
required=True,
help=f"TestExecution ID of the contender (see {PROGRAM_NAME} list test_executions).")
compare_parser.add_argument(
"--percentiles",
help=f"A comma-separated list of percentiles to report latency and service time."
f"(default: {metrics.GlobalStatsCalculator.DEFAULT_LATENCY_PERCENTILES}).",
default=metrics.GlobalStatsCalculator.DEFAULT_LATENCY_PERCENTILES)
compare_parser.add_argument(
"--results-format",
help="Define the output format for the command line results (default: markdown).",
@@ -834,6 +839,7 @@ def dispatch_sub_command(arg_parser, args, cfg):
try:
if sub_command == "compare":
configure_results_publishing_params(args, cfg)
cfg.add(config.Scope.applicationOverride, "results_publishing", "percentiles", args.percentiles)
results_publisher.compare(cfg, args.baseline, args.contender)
elif sub_command == "list":
cfg.add(config.Scope.applicationOverride, "system", "list.config.option", args.configuration)
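For context, a hedged, self-contained sketch of the argparse wiring added above. The default string and the standalone parser below are assumptions for illustration; the real values come from metrics.GlobalStatsCalculator.DEFAULT_LATENCY_PERCENTILES and the full OSB argument parser.

# Standalone sketch of the new --percentiles option on the compare sub-command.
# DEFAULT_LATENCY_PERCENTILES is an assumed stand-in for the real constant in
# osbenchmark.metrics.GlobalStatsCalculator.
import argparse

DEFAULT_LATENCY_PERCENTILES = "50,90,99,99.9,99.99,100"  # assumed default value

compare_parser = argparse.ArgumentParser(prog="compare")
compare_parser.add_argument("--baseline", required=True)
compare_parser.add_argument("--contender", required=True)
compare_parser.add_argument(
    "--percentiles",
    help=f"A comma-separated list of percentiles to report for latency and service time "
         f"(default: {DEFAULT_LATENCY_PERCENTILES}).",
    default=DEFAULT_LATENCY_PERCENTILES)

args = compare_parser.parse_args(
    ["--baseline", "baseline-id", "--contender", "contender-id",
     "--percentiles", "50,90,99.9"])
print(args.percentiles)  # "50,90,99.9"

The value arrives here as a plain string; dispatch_sub_command then stores it under the results_publishing scope, and parsing into numbers happens later in results_publisher.py.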
5 changes: 3 additions & 2 deletions osbenchmark/results_publisher.py
@@ -337,14 +337,15 @@ def _line(self, k, task, v, unit, converter=lambda x: x, force=False):

class ComparisonResultsPublisher:
def __init__(self, config):
self.logger = logging.getLogger(__name__)
self.results_file = config.opts("results_publishing", "output.path")
self.results_format = config.opts("results_publishing", "format")
self.numbers_align = config.opts("results_publishing", "numbers.align",
mandatory=False, default_value="right")
self.cwd = config.opts("node", "benchmark.cwd")
self.show_processing_time = convert.to_bool(config.opts("results_publishing", "output.processingtime",
mandatory=False, default_value=False))
self.latency_percentiles = comma_separated_string_to_number_list(config.opts("workload", "latency.percentiles", mandatory=False))
self.percentiles = comma_separated_string_to_number_list(config.opts("results_publishing", "percentiles", mandatory=False))
self.plain = False

def publish(self, r1, r2):
@@ -442,7 +443,7 @@ def _publish_processing_time(self, baseline_stats, contender_stats, task):

def _publish_percentiles(self, name, task, baseline_values, contender_values):
lines = []
for percentile in metrics.percentiles_for_sample_size(sys.maxsize, percentiles_list=self.latency_percentiles):
for percentile in metrics.percentiles_for_sample_size(sys.maxsize, percentiles_list=self.percentiles):
baseline_value = baseline_values.get(metrics.encode_float_key(percentile))
contender_value = contender_values.get(metrics.encode_float_key(percentile))
self._append_non_empty(lines, self._line("%sth percentile %s" % (percentile, name),
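To make the renamed lookup concrete, here is a minimal sketch with a fake config object and an assumed re-implementation of comma_separated_string_to_number_list, showing how the constructor now reads the percentile list from the results_publishing section instead of workload / latency.percentiles:

# FakeConfig is a stand-in for osbenchmark's Config; only the opts() call
# pattern used in ComparisonResultsPublisher.__init__ is mimicked here.
class FakeConfig:
    def __init__(self, values):
        self._values = values

    def opts(self, section, key, mandatory=True, default_value=None):
        return self._values.get((section, key), default_value)

def comma_separated_string_to_number_list(value):
    # Assumed behaviour of the real helper: "50,90,99.9" -> [50, 90, 99.9];
    # None stays None so callers can fall back to their own defaults.
    if value is None:
        return None
    return [float(p) if "." in p else int(p)
            for p in (part.strip() for part in value.split(",")) if p]

cfg = FakeConfig({("results_publishing", "percentiles"): "50,90,99.9"})
percentiles = comma_separated_string_to_number_list(
    cfg.opts("results_publishing", "percentiles", mandatory=False))
print(percentiles)  # [50, 90, 99.9]

Because the compare sub-command injects either the user-supplied value or the argparse default, the mandatory=False lookup should normally find a value, and _publish_percentiles then iterates over exactly that list.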
8 changes: 7 additions & 1 deletion osbenchmark/worker_coordinator/runner.py
@@ -480,6 +480,7 @@ async def __call__(self, opensearch, params):
The following keys are optional:
* ``pipeline``: If present, runs the specified ingest pipeline for this bulk.
* ``request-params``: If present, they will be passed as parameters of bulk.
* ``detailed-results``: If ``True``, the runner will analyze the response and add detailed meta-data. Defaults to ``False``. Note
that this has a very significant impact on performance and will very
likely cause a bottleneck in the benchmark worker_coordinator so please
@@ -491,12 +492,17 @@ async def __call__(self, opensearch, params):
``None`` and potentially falls back to the global timeout setting.
"""
detailed_results = params.get("detailed-results", False)
api_kwargs = self._default_kw_params(params)

bulk_params = {}
if "pipeline" in params:
bulk_params["pipeline"] = params["pipeline"]

if "request-params" in params:
bulk_params.update(params["request-params"])
params.pop( "request-params" )

api_kwargs = self._default_kw_params(params)

with_action_metadata = mandatory(params, "action-metadata-present", self)
bulk_size = mandatory(params, "bulk-size", self)
unit = mandatory(params, "unit", self)
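The reordering above matters because the request parameters have to be stripped from params before the remaining entries become the default keyword arguments for the client call. A hedged, standalone sketch of that flow; the dictionary values are made up, and dict(params) stands in for self._default_kw_params(params):

# Illustrative parameter handling mirroring the rearranged bulk runner code:
# "request-params" is merged into bulk_params and removed from params *before*
# the remaining params are turned into default keyword arguments, so the
# request parameters are not forwarded twice.
params = {
    "pipeline": "my-ingest-pipeline",            # hypothetical pipeline name
    "request-params": {"refresh": "wait_for"},   # hypothetical request params
    "action-metadata-present": True,
    "bulk-size": 500,
    "unit": "docs",
}

detailed_results = params.get("detailed-results", False)

bulk_params = {}
if "pipeline" in params:
    bulk_params["pipeline"] = params["pipeline"]

if "request-params" in params:
    bulk_params.update(params["request-params"])
    params.pop("request-params")

# stand-in for self._default_kw_params(params): whatever remains becomes
# keyword arguments for the OpenSearch client call
api_kwargs = dict(params)

print(bulk_params)                     # {'pipeline': 'my-ingest-pipeline', 'refresh': 'wait_for'}
print("request-params" in api_kwargs)  # False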
