Skip to content

Commit

Permalink
Merge branch 'habana_main' into dev/mfylcek/sampler-aware_batch_size_padding
Browse files Browse the repository at this point in the history
  • Loading branch information
mfylcek authored Jan 10, 2025
2 parents deda42a + 73aaf71 commit 2520ec5
Show file tree
Hide file tree
Showing 17 changed files with 459 additions and 238 deletions.
2 changes: 1 addition & 1 deletion .jenkins/test_config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -57,4 +57,4 @@ stages:
command: TORCH_COMPILE_DISABLE=true VLLM_CONTIGUOUS_PA=false VLLM_SKIP_WARMUP=True pytest -v tests/spec_decode/e2e/test_medusa_correctness.py::test_medusa_e2e_greedy_correctness
- name: gsm8k_small_g2_tp1_eagle_spec_decode
flavor: g2
command: TORCH_COMPILE_DISABLE=true VLLM_CONTIGUOUS_PA=false VLLM_SKIP_WARMUP=True pytest -v tests/spec_decode/e2e/test_eagle_correctness.py::test_eagle_e2e_greedy_correctness
command: VLLM_COS_SIN_RECOMPUTE=true TORCH_COMPILE_DISABLE=true VLLM_CONTIGUOUS_PA=false VLLM_SKIP_WARMUP=True pytest -v tests/spec_decode/e2e/test_eagle_correctness.py::test_eagle_e2e_greedy_correctness
4 changes: 4 additions & 0 deletions benchmarks/benchmark_throughput.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,10 @@ def _get_prompt_for_image_model(question: str, *, model: str) -> str:
model = model.lower()
if "pixtral" in model:
return f"<s>[INST]{question}\n[IMG][/INST]"
elif "llava" in model:
return f"USER: <image>\n{question}\nASSISTANT:"
elif "llama-3.2" in model:
return f"<|image|><|begin_of_text|>{question}"
raise ValueError(f"Unsupported model {model}")


Expand Down
1 change: 1 addition & 0 deletions tests/lora/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,7 @@ def cleanup_fixture(should_do_global_cleanup_after_test: bool):

@pytest.fixture
def dist_init():
import habana_frameworks.torch.hpu # noqa: F401
temp_file = tempfile.mkstemp()[1]
backend_type = "hccl" if current_platform.is_hpu() else "nccl"
init_distributed_environment(
Expand Down
Loading

0 comments on commit 2520ec5

Please sign in to comment.