Skip to content

Commit

Permalink
Removing deepcopy in an attempt to fix CodeQL errors
Browse files Browse the repository at this point in the history
  • Loading branch information
nv-braf committed Oct 10, 2023
1 parent cbdc746 commit 0c909ea
Showing 1 changed file with 7 additions and 9 deletions.
16 changes: 7 additions & 9 deletions model_analyzer/config/generate/perf_analyzer_config_generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@

import json
import logging
from copy import deepcopy
from typing import Dict, Generator, List, Optional, Tuple

from model_analyzer.config.input.config_command_profile import ConfigCommandProfile
Expand Down Expand Up @@ -192,10 +191,7 @@ def set_last_results(

def _set_perf_analyzer_flags(self, model_perf_analyzer_flags: dict) -> dict:
# For LLM models we will be creating custom input data based on prompt length
perf_analyzer_flags = deepcopy(model_perf_analyzer_flags)
# perf_analyzer_flags = {
# key: value for key, value in model_perf_analyzer_flags.items()
# }
perf_analyzer_flags = {k: v for k, v in model_perf_analyzer_flags.items()}

if self._cli_config.is_llm_model():
perf_analyzer_flags.pop("input-data")
Expand All @@ -212,9 +208,9 @@ def _create_input_dict(self, model_perf_analyzer_flags: dict) -> dict:
return {}

def _modify_prompt_in_input_dict(self, prompt_length: int) -> Dict:
modified_input_dict = deepcopy(self._llm_input_dict)

modified_prompt = ["hi"] * prompt_length

modified_input_dict = {k: v for k, v in self._llm_input_dict.items()}
modified_input_dict["data"][0]["PROMPT"] = modified_prompt

return modified_input_dict
Expand Down Expand Up @@ -322,10 +318,12 @@ def _create_base_perf_config(self) -> PerfAnalyzerConfig:
def _extract_prompt_length(
self, unmodified_parameter_combination: Dict
) -> Tuple[int, Dict]:
modified_parameter_combination = {
k: v for k, v in unmodified_parameter_combination.items()
}

if self._cli_config.is_llm_model():
modified_parameter_combination = deepcopy(unmodified_parameter_combination)
prompt_length = modified_parameter_combination.pop("prompt-length")

return prompt_length, modified_parameter_combination
else:
return 0, unmodified_parameter_combination
Expand Down

0 comments on commit 0c909ea

Please sign in to comment.