Skip to content

Commit

Permalink
Adding ensemble support to optuna (#903)
Browse files Browse the repository at this point in the history
  • Loading branch information
nv-braf authored Jun 26, 2024
1 parent 5c7e283 commit 87ec68b
Show file tree
Hide file tree
Showing 4 changed files with 88 additions and 17 deletions.
1 change: 1 addition & 0 deletions model_analyzer/analyzer.py
Original file line number Diff line number Diff line change
Expand Up @@ -431,6 +431,7 @@ def _populate_search_parameters(self, client, gpus):
config=self._config,
model=model_profile_spec,
is_bls_model=bool(self._config.bls_composing_models),
is_ensemble_model=model_profile_spec.is_ensemble(),
)

def _populate_composing_search_parameters(self, client, gpus):
Expand Down
91 changes: 75 additions & 16 deletions model_analyzer/config/generate/optuna_run_config_generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
# limitations under the License.

import logging
from sys import maxsize
from typing import Any, Dict, Generator, List, Optional, TypeAlias, Union

import optuna
Expand Down Expand Up @@ -183,7 +184,7 @@ def get_configs(self) -> Generator[RunConfig, None, None]:
yield run_config

score = self._calculate_score()
self._set_best_measurement(run_config, score)
self._set_best_measurement(run_config, score, trial_number)

if logging.DEBUG:
self._print_debug_score_info(run_config, score)
Expand Down Expand Up @@ -429,15 +430,18 @@ def _create_objective_based_run_config(
) -> RunConfig:
run_config = RunConfig(self._triton_env)

# TODO: TMA-1927: Add support for multi-model
model_config_variant = self._create_model_config_variant(
self._models[0], trial_objectives
)

composing_model_config_variants = self._create_composing_model_config_variants(
composing_trial_objectives
)

# TODO: TMA-1927: Add support for multi-model
model_config_variant = self._create_model_config_variant(
model=self._models[0],
trial_objectives=trial_objectives,
composing_trial_objectives=composing_trial_objectives,
composing_model_config_variants=composing_model_config_variants,
)

# TODO: TMA-1927: Add support for multi-model
model_run_config = self._create_model_run_config(
model=self._models[0],
Expand All @@ -451,6 +455,44 @@ def _create_objective_based_run_config(
return run_config

def _create_parameter_combo(
    self,
    model: ModelProfileSpec,
    trial_objectives: TrialObjectives,
    composing_trial_objectives: ComposingTrialObjectives,
) -> ParameterCombo:
    """
    Build the parameter combination for this trial, dispatching on the
    model type: ensemble models are configured from their composing
    models' objectives, all others directly from the trial objectives.
    """
    if not model.is_ensemble():
        return self._create_non_ensemble_parameter_combo(trial_objectives)

    return self._create_ensemble_parameter_combo(composing_trial_objectives)

def _create_ensemble_parameter_combo(
    self,
    composing_trial_objectives: ComposingTrialObjectives,
) -> ParameterCombo:
    """
    Create the parameter combination for an ensemble model.

    The only parameter set on the ensemble itself is `max_batch_size`,
    which must not exceed any composing model's batch size; it is taken
    as the minimum over the composing models' `max_batch_size` values
    (defaulting each missing value to 1).

    NOTE(review): if `composing_trial_objectives` is empty the result is
    `sys.maxsize`, preserving the original behavior — presumably an
    ensemble always has composing models; confirm upstream.
    """
    # Builtin min() with default= replaces the original manual
    # reduction loop; behavior is identical, including the empty case.
    min_val_of_max_batch_size = int(
        min(
            (
                composing_trial_objective.get("max_batch_size", 1)
                for composing_trial_objective in composing_trial_objectives.values()
            ),
            default=maxsize,
        )
    )

    return {"max_batch_size": min_val_of_max_batch_size}

def _create_non_ensemble_parameter_combo(
self, trial_objectives: TrialObjectives
) -> ParameterCombo:
param_combo: ParameterCombo = {}
Expand Down Expand Up @@ -481,17 +523,32 @@ def _create_parameter_combo(
return param_combo

def _create_model_config_variant(
    self,
    model: ModelProfileSpec,
    trial_objectives: TrialObjectives,
    composing_trial_objectives: Optional[ComposingTrialObjectives] = None,
    composing_model_config_variants: Optional[List[ModelConfigVariant]] = None,
) -> ModelConfigVariant:
    """
    Create a ModelConfigVariant for `model` from this trial's objectives.

    For ensemble models the variant is assembled from the composing
    models' config variants; otherwise a plain model config variant is
    generated from the parameter combination.

    The composing arguments default to None (treated as empty) — using
    None sentinels instead of mutable `{}` / `[]` defaults avoids the
    shared-mutable-default pitfall.
    """
    if composing_trial_objectives is None:
        composing_trial_objectives = {}
    if composing_model_config_variants is None:
        composing_model_config_variants = []

    param_combo = self._create_parameter_combo(
        model, trial_objectives, composing_trial_objectives
    )

    if model.is_ensemble():
        model_config_variant = BaseModelConfigGenerator.make_ensemble_model_config_variant(
            model=model,
            ensemble_composing_model_config_variants=composing_model_config_variants,
            model_variant_name_manager=self._model_variant_name_manager,
            param_combo=param_combo,
            c_api_mode=self._c_api_mode,
        )
    else:
        model_config_variant = BaseModelConfigGenerator.make_model_config_variant(
            param_combo=param_combo,
            model=model,
            model_variant_name_manager=self._model_variant_name_manager,
            c_api_mode=self._c_api_mode,
        )

    return model_config_variant

def _create_composing_model_config_variants(
Expand All @@ -500,8 +557,10 @@ def _create_composing_model_config_variants(
composing_model_config_variants = []
for composing_model in self._composing_models:
composing_model_config_variant = self._create_model_config_variant(
composing_model,
composing_trial_objectives[composing_model.model_name()],
model=composing_model,
trial_objectives=composing_trial_objectives[
composing_model.model_name()
],
)
composing_model_config_variants.append(composing_model_config_variant)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,15 @@ def create_run_config_generator(
new_models, command_config, client, gpus
)

for composing_model in composing_models:
composing_search_parameters[
composing_model.model_name()
] = SearchParameters(
config=command_config,
model=composing_model,
is_composing_model=True,
)

if command_config.run_config_search_mode == "optuna":
return RunConfigGeneratorFactory._create_optuna_plus_concurrency_sweep_run_config_generator(
command_config=command_config,
Expand Down
4 changes: 3 additions & 1 deletion model_analyzer/config/generate/search_parameters.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,13 +46,15 @@ def __init__(
model: ModelProfileSpec,
config: ConfigCommandProfile = ConfigCommandProfile(),
is_bls_model: bool = False,
is_ensemble_model: bool = False,
is_composing_model: bool = False,
):
self._config = config
self._parameters = model.parameters()
self._model_config_parameters = model.model_config_parameters()
self._supports_max_batch_size = model.supports_batching()
self._search_parameters: Dict[str, SearchParameter] = {}
self._is_ensemble_model = is_ensemble_model
self._is_bls_model = is_bls_model
self._is_composing_model = is_composing_model

Expand Down Expand Up @@ -196,7 +198,7 @@ def _populate_instance_group(self) -> None:
parameter_list=parameter_list,
parameter_category=ParameterCategory.INT_LIST,
)
else:
elif not self._is_ensemble_model:
# Need to populate instance_group based on RCS min/max values
# when no model config parameters are present
self._populate_rcs_parameter(
Expand Down

0 comments on commit 87ec68b

Please sign in to comment.