Commit 1110e3d: Bug fixes
IzzyPutterman committed May 14, 2024
1 parent 2093d1b
Showing 2 changed files with 3 additions and 3 deletions.
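
In short: the TensorRT-LLM backend conversion path in the first file called the generic `_populate_trtllm_output_json` where it should call `_populate_trtllm_backend_output_json`, and `_tokenize_response_outputs` in llm_metrics.py passed the out-of-scope name `req_inputs` instead of its own `res_outputs` parameter to the TRT-LLM tokenizer. Both fixes are visible in the diffs below.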
First changed file (path not shown in this capture), 2 additions & 2 deletions:

@@ -579,7 +579,7 @@ def _convert_generic_json_to_trtllm_backend_format(
             text_input_headers,
         ) = cls._determine_json_feature_roles(dataset_json)
 
-        pa_json = cls._populate_trtllm_output_json(
+        pa_json = cls._populate_trtllm_backend_output_json(
             dataset_json,
             system_role_headers,
             user_role_headers,
@@ -819,7 +819,7 @@ def _populate_trtllm_output_json(
 
         return pa_json
 
-    @classmethod
+    @classmethod
     def _populate_trtllm_backend_output_json(
         cls,
         dataset_json: Dict,
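
For context, here is a pared-down sketch of the call relationship the first hunk corrects. Only the method names and the dataset_json/header-role arguments come from the diff; the class name, method bodies, and payload shapes are invented placeholders, not genai-perf's actual code.

```python
from typing import Dict, List


class LlmInputsSketch:
    """Stand-in for the class in the first changed file; illustrative only."""

    @classmethod
    def _convert_generic_json_to_trtllm_backend_format(
        cls, dataset_json: Dict, *header_roles: List[str]
    ) -> Dict:
        # Before this commit, the line below called
        # cls._populate_trtllm_output_json, so the TRT-LLM *backend*
        # conversion path silently built payloads in the non-backend format.
        return cls._populate_trtllm_backend_output_json(dataset_json, *header_roles)

    @classmethod
    def _populate_trtllm_output_json(
        cls, dataset_json: Dict, *header_roles: List[str]
    ) -> Dict:
        # Placeholder payload shape for the non-backend format.
        return {"data": [{"text_input": row} for row in dataset_json.get("rows", [])]}

    @classmethod
    def _populate_trtllm_backend_output_json(
        cls, dataset_json: Dict, *header_roles: List[str]
    ) -> Dict:
        # Placeholder payload shape, wrapped so the two outputs differ visibly.
        return {"data": [{"text_input": [row]} for row in dataset_json.get("rows", [])]}


print(LlmInputsSketch._convert_generic_json_to_trtllm_backend_format({"rows": ["hi"]}))
# {'data': [{'text_input': ['hi']}]}
```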
src/c++/perf_analyzer/genai-perf/genai_perf/llm_metrics.py, 2 changes: 1 addition & 1 deletion:

@@ -652,7 +652,7 @@ def _tokenize_openai_request_input(self, req_inputs: dict) -> List[int]:
     def _tokenize_response_outputs(self, res_outputs: dict) -> List[List[int]]:
         """Deserialize the response output and return tokenized outputs."""
         if self._service_kind == "triton" and self._response_format == ResponseFormat.TENSORRTLLM_BACKEND:
-            return self._tokenize_trtllm_response_output(req_inputs)
+            return self._tokenize_trtllm_response_output(res_outputs)
         elif self._service_kind == "triton":
             return self._tokenize_triton_response_output(res_outputs)
         elif self._service_kind == "openai":
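
The llm_metrics.py change fixes what the visible signature suggests is a plain NameError: inside `_tokenize_response_outputs` only the `res_outputs` parameter is in scope, so the old `req_inputs` reference would fail the first time the TENSORRTLLM_BACKEND branch ran (assuming no module-level `req_inputs` exists). A minimal, self-contained repro of the corrected dispatch, with the real genai-perf types replaced by stand-ins:

```python
from enum import Enum, auto
from typing import List


class ResponseFormat(Enum):
    # Stand-in for genai-perf's ResponseFormat; only the member used here.
    TENSORRTLLM_BACKEND = auto()


class MetricsSketch:
    """Pared-down stand-in for the class holding _tokenize_response_outputs."""

    def __init__(self) -> None:
        self._service_kind = "triton"
        self._response_format = ResponseFormat.TENSORRTLLM_BACKEND

    def _tokenize_trtllm_response_output(self, res_outputs: dict) -> List[List[int]]:
        # Placeholder tokenizer: one dummy token id per response entry.
        return [[0] for _ in res_outputs]

    def _tokenize_response_outputs(self, res_outputs: dict) -> List[List[int]]:
        """Deserialize the response output and return tokenized outputs."""
        if (
            self._service_kind == "triton"
            and self._response_format == ResponseFormat.TENSORRTLLM_BACKEND
        ):
            # Pre-fix line: return self._tokenize_trtllm_response_output(req_inputs)
            # -> NameError: name 'req_inputs' is not defined.
            return self._tokenize_trtllm_response_output(res_outputs)
        raise NotImplementedError("other service kinds elided in this sketch")


print(MetricsSketch()._tokenize_response_outputs({"text_output": "hi"}))  # [[0]]
```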
