From 97a6be95ba279c2bc9cdd8890506ec94e63b268d Mon Sep 17 00:00:00 2001
From: Andrew Wang
Date: Mon, 12 Aug 2024 19:29:34 -0700
Subject: [PATCH] [Misc] improve logits processors logging message (#7435)

---
 tests/entrypoints/openai/test_serving_chat.py | 1 -
 vllm/entrypoints/openai/logits_processors.py  | 2 +-
 2 files changed, 1 insertion(+), 2 deletions(-)

diff --git a/tests/entrypoints/openai/test_serving_chat.py b/tests/entrypoints/openai/test_serving_chat.py
index 168ba7ba88..3783b7cd66 100644
--- a/tests/entrypoints/openai/test_serving_chat.py
+++ b/tests/entrypoints/openai/test_serving_chat.py
@@ -73,7 +73,6 @@ def test_serving_chat_should_set_correct_max_tokens():
     with suppress(Exception):
         asyncio.run(serving_chat.create_chat_completion(req))

-    # AsyncLLMEngine.generate(inputs, sampling_params, ...)
     assert mock_engine.generate.call_args.args[1].max_tokens == 93

     req.max_tokens = 10
diff --git a/vllm/entrypoints/openai/logits_processors.py b/vllm/entrypoints/openai/logits_processors.py
index c0cd820e30..c470c32c27 100644
--- a/vllm/entrypoints/openai/logits_processors.py
+++ b/vllm/entrypoints/openai/logits_processors.py
@@ -71,7 +71,7 @@ def get_logits_processors(
         # Check if token_id is within the vocab size
         for token_id, bias in clamped_logit_bias.items():
             if token_id < 0 or token_id >= tokenizer.vocab_size:
-                raise ValueError("token_id in logit_bias contains "
+                raise ValueError(f"token_id {token_id} in logit_bias contains "
                                  "out-of-vocab token id")

         logits_processors.append(
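
To illustrate what the change buys, here is a minimal, self-contained sketch
(not vLLM's actual module) of the validation loop the patch touches.
`FakeTokenizer` and `check_logit_bias` are hypothetical names introduced only
for this demo; only the `vocab_size` attribute mimics the real tokenizer API,
and 32000 is an assumed vocab size.

    # Hypothetical standalone sketch of the patched validation loop.
    from typing import Dict


    class FakeTokenizer:
        vocab_size = 32000  # assumed vocab size, for illustration only


    def check_logit_bias(clamped_logit_bias: Dict[int, float],
                         tokenizer: FakeTokenizer) -> None:
        # Check if token_id is within the vocab size (same logic as the patch)
        for token_id, bias in clamped_logit_bias.items():
            if token_id < 0 or token_id >= tokenizer.vocab_size:
                raise ValueError(f"token_id {token_id} in logit_bias contains "
                                 "out-of-vocab token id")


    try:
        check_logit_bias({50000: 1.5}, FakeTokenizer())
    except ValueError as e:
        print(e)
    # Before the patch: token_id in logit_bias contains out-of-vocab token id
    # After the patch:  token_id 50000 in logit_bias contains out-of-vocab token id

Including the offending token id in the message lets API users see at a glance
which key of their `logit_bias` dict is out of range, instead of having to
bisect the dict by hand.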