From c959139c2adda89fd859061369a6f43ad8fbdba4 Mon Sep 17 00:00:00 2001
From: Brian Raf
Date: Mon, 24 Jun 2024 14:21:27 +0000
Subject: [PATCH] Adding ensemble support to optuna

---
 model_analyzer/analyzer.py                    |  1 +
 .../generate/optuna_run_config_generator.py   | 91 +++++++++++++++----
 .../generate/run_config_generator_factory.py  |  9 ++
 .../config/generate/search_parameters.py      |  4 +-
 4 files changed, 88 insertions(+), 17 deletions(-)

diff --git a/model_analyzer/analyzer.py b/model_analyzer/analyzer.py
index f218e1256..39fc10f34 100755
--- a/model_analyzer/analyzer.py
+++ b/model_analyzer/analyzer.py
@@ -431,6 +431,7 @@ def _populate_search_parameters(self, client, gpus):
                 config=self._config,
                 model=model_profile_spec,
                 is_bls_model=bool(self._config.bls_composing_models),
+                is_ensemble_model=model_profile_spec.is_ensemble(),
             )
 
     def _populate_composing_search_parameters(self, client, gpus):
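The change above wires ensemble detection into search-parameter creation. As a rough
sketch of the assumed plumbing (only the method name `is_ensemble` comes from the
diff; the simplified class body below is an assumption for illustration, not the
repository's actual code):

# Hypothetical, simplified sketch. A Triton config.pbtxt marks an ensemble
# with platform: "ensemble"; the model spec exposes that as a boolean the
# analyzer can forward into SearchParameters.

class ModelProfileSpec:
    def __init__(self, config: dict) -> None:
        self._config = config

    def is_ensemble(self) -> bool:
        # True when the parsed model config declares the ensemble platform
        return self._config.get("platform") == "ensemble"

spec = ModelProfileSpec({"platform": "ensemble"})
print(spec.is_ensemble())  # True
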
diff --git a/model_analyzer/config/generate/optuna_run_config_generator.py b/model_analyzer/config/generate/optuna_run_config_generator.py
index 55757d65f..b08b15d1c 100755
--- a/model_analyzer/config/generate/optuna_run_config_generator.py
+++ b/model_analyzer/config/generate/optuna_run_config_generator.py
@@ -15,6 +15,7 @@
 # limitations under the License.
 
 import logging
+from sys import maxsize
 from typing import Any, Dict, Generator, List, Optional, TypeAlias, Union
 
 import optuna
@@ -183,7 +184,7 @@ def get_configs(self) -> Generator[RunConfig, None, None]:
             yield run_config
 
             score = self._calculate_score()
-            self._set_best_measurement(run_config, score)
+            self._set_best_measurement(run_config, score, trial_number)
 
             if logging.DEBUG:
                 self._print_debug_score_info(run_config, score)
@@ -429,15 +430,18 @@ def _create_objective_based_run_config(
     ) -> RunConfig:
         run_config = RunConfig(self._triton_env)
 
-        # TODO: TMA-1927: Add support for multi-model
-        model_config_variant = self._create_model_config_variant(
-            self._models[0], trial_objectives
-        )
-
         composing_model_config_variants = self._create_composing_model_config_variants(
             composing_trial_objectives
         )
 
+        # TODO: TMA-1927: Add support for multi-model
+        model_config_variant = self._create_model_config_variant(
+            model=self._models[0],
+            trial_objectives=trial_objectives,
+            composing_trial_objectives=composing_trial_objectives,
+            composing_model_config_variants=composing_model_config_variants,
+        )
+
         # TODO: TMA-1927: Add support for multi-model
         model_run_config = self._create_model_run_config(
             model=self._models[0],
@@ -451,6 +455,44 @@ def _create_objective_based_run_config(
         return run_config
 
     def _create_parameter_combo(
+        self,
+        model: ModelProfileSpec,
+        trial_objectives: TrialObjectives,
+        composing_trial_objectives: ComposingTrialObjectives,
+    ) -> ParameterCombo:
+        if model.is_ensemble():
+            param_combo = self._create_ensemble_parameter_combo(
+                composing_trial_objectives
+            )
+        else:
+            param_combo = self._create_non_ensemble_parameter_combo(trial_objectives)
+
+        return param_combo
+
+    def _create_ensemble_parameter_combo(
+        self,
+        composing_trial_objectives: ComposingTrialObjectives,
+    ) -> ParameterCombo:
+        """
+        For an ensemble model the only parameter we need to set is
+        max batch size, which is the minimum of the max batch sizes
+        found across the composing models
+        """
+
+        min_val_of_max_batch_size = maxsize
+        for composing_trial_objective in composing_trial_objectives.values():
+            min_val_of_max_batch_size = int(
+                min(
+                    composing_trial_objective.get("max_batch_size", 1),
+                    min_val_of_max_batch_size,
+                )
+            )
+
+        param_combo = {"max_batch_size": min_val_of_max_batch_size}
+
+        return param_combo
+
+    def _create_non_ensemble_parameter_combo(
         self, trial_objectives: TrialObjectives
     ) -> ParameterCombo:
         param_combo: ParameterCombo = {}
@@ -481,17 +523,32 @@ def _create_parameter_combo(
         return param_combo
 
     def _create_model_config_variant(
-        self, model: ModelProfileSpec, trial_objectives: TrialObjectives
+        self,
+        model: ModelProfileSpec,
+        trial_objectives: TrialObjectives,
+        composing_trial_objectives: ComposingTrialObjectives = {},
+        composing_model_config_variants: List[ModelConfigVariant] = [],
     ) -> ModelConfigVariant:
-        param_combo = self._create_parameter_combo(trial_objectives)
-
-        model_config_variant = BaseModelConfigGenerator.make_model_config_variant(
-            param_combo=param_combo,
-            model=model,
-            model_variant_name_manager=self._model_variant_name_manager,
-            c_api_mode=self._c_api_mode,
+        param_combo = self._create_parameter_combo(
+            model, trial_objectives, composing_trial_objectives
         )
 
+        if model.is_ensemble():
+            model_config_variant = BaseModelConfigGenerator.make_ensemble_model_config_variant(
+                model=model,
+                ensemble_composing_model_config_variants=composing_model_config_variants,
+                model_variant_name_manager=self._model_variant_name_manager,
+                param_combo=param_combo,
+                c_api_mode=self._c_api_mode,
+            )
+        else:
+            model_config_variant = BaseModelConfigGenerator.make_model_config_variant(
+                param_combo=param_combo,
+                model=model,
+                model_variant_name_manager=self._model_variant_name_manager,
+                c_api_mode=self._c_api_mode,
+            )
+
         return model_config_variant
 
     def _create_composing_model_config_variants(
@@ -500,8 +557,10 @@ def _create_composing_model_config_variants(
         composing_model_config_variants = []
         for composing_model in self._composing_models:
             composing_model_config_variant = self._create_model_config_variant(
-                composing_model,
-                composing_trial_objectives[composing_model.model_name()],
+                model=composing_model,
+                trial_objectives=composing_trial_objectives[
+                    composing_model.model_name()
+                ],
             )
             composing_model_config_variants.append(composing_model_config_variant)
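The new `_create_ensemble_parameter_combo` above is the heart of the ensemble
support: the ensemble's own config needs only a max_batch_size, capped by the
smallest max batch size proposed for any composing model. Below is a standalone
sketch of that rule; the model names and objective values are invented for
illustration, while the reduction itself mirrors the diff:

from sys import maxsize

# Trial objectives Optuna might propose for two composing models.
composing_trial_objectives = {
    "preprocess": {"max_batch_size": 8, "instance_count": 2},
    "classify": {"max_batch_size": 4, "instance_count": 1},
}

# Same reduction as the diff: take the minimum of the composing models'
# max batch sizes, defaulting to 1 when a model proposes none.
min_val_of_max_batch_size = maxsize
for objectives in composing_trial_objectives.values():
    min_val_of_max_batch_size = int(
        min(objectives.get("max_batch_size", 1), min_val_of_max_batch_size)
    )

param_combo = {"max_batch_size": min_val_of_max_batch_size}
print(param_combo)  # {'max_batch_size': 4}
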
diff --git a/model_analyzer/config/generate/run_config_generator_factory.py b/model_analyzer/config/generate/run_config_generator_factory.py
index 7377f1831..419352f3b 100755
--- a/model_analyzer/config/generate/run_config_generator_factory.py
+++ b/model_analyzer/config/generate/run_config_generator_factory.py
@@ -95,6 +95,15 @@ def create_run_config_generator(
             new_models, command_config, client, gpus
         )
 
+        for composing_model in composing_models:
+            composing_search_parameters[
+                composing_model.model_name()
+            ] = SearchParameters(
+                config=command_config,
+                model=composing_model,
+                is_composing_model=True,
+            )
+
         if command_config.run_config_search_mode == "optuna":
             return RunConfigGeneratorFactory._create_optuna_plus_concurrency_sweep_run_config_generator(
                 command_config=command_config,
diff --git a/model_analyzer/config/generate/search_parameters.py b/model_analyzer/config/generate/search_parameters.py
index 2f3f387fe..90ffe0e2a 100755
--- a/model_analyzer/config/generate/search_parameters.py
+++ b/model_analyzer/config/generate/search_parameters.py
@@ -46,6 +46,7 @@ def __init__(
         model: ModelProfileSpec,
         config: ConfigCommandProfile = ConfigCommandProfile(),
         is_bls_model: bool = False,
+        is_ensemble_model: bool = False,
         is_composing_model: bool = False,
     ):
         self._config = config
@@ -53,6 +54,7 @@ def __init__(
         self._model_config_parameters = model.model_config_parameters()
         self._supports_max_batch_size = model.supports_batching()
         self._search_parameters: Dict[str, SearchParameter] = {}
+        self._is_ensemble_model = is_ensemble_model
         self._is_bls_model = is_bls_model
         self._is_composing_model = is_composing_model
 
@@ -196,7 +198,7 @@ def _populate_instance_group(self) -> None:
                 parameter_list=parameter_list,
                 parameter_category=ParameterCategory.INT_LIST,
             )
-        else:
+        elif not self._is_ensemble_model:
            # Need to populate instance_group based on RCS min/max values
            # when no model config parameters are present
            self._populate_rcs_parameter(
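Finally, the `elif not self._is_ensemble_model` guard above means an ensemble
never gets an instance_group search dimension of its own: instance counts belong
to its composing models, which receive their own SearchParameters through the
factory change. A minimal condensation of that control flow (the function name,
parameters, and RCS defaults below are hypothetical stand-ins, not the
repository's API):

# Hypothetical condensation of the gating logic in _populate_instance_group.
def instance_group_search_space(
    has_config_parameters: bool,
    is_ensemble_model: bool,
    rcs_min: int = 1,
    rcs_max: int = 5,
) -> list:
    if has_config_parameters:
        # explicit model config parameters win; handled from that list
        return ["values from model config parameters"]
    elif not is_ensemble_model:
        # sweep instance counts between the RCS min/max defaults
        return list(range(rcs_min, rcs_max + 1))
    else:
        # ensembles have no instances of their own; nothing to search
        return []

print(instance_group_search_space(False, False))  # [1, 2, 3, 4, 5]
print(instance_group_search_space(False, True))   # []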