From eb563fd49c75086ab70bd2d6fcdc9ed9d3e71ffb Mon Sep 17 00:00:00 2001 From: maang Date: Thu, 27 Nov 2025 11:27:42 +0800 Subject: [PATCH 1/3] [docs] Improve `priority` parameter documentation Signed-off-by: maang --- vllm/entrypoints/llm.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/vllm/entrypoints/llm.py b/vllm/entrypoints/llm.py index 1860f383d45f..9cd7219dee22 100644 --- a/vllm/entrypoints/llm.py +++ b/vllm/entrypoints/llm.py @@ -405,6 +405,8 @@ def generate( lora_request: LoRA request to use for generation, if any. priority: The priority of the requests, if any. Only applicable when priority scheduling policy is enabled. + If provided, must be a list matching the length of `prompts`, + where each priority value corresponds to the prompt at the same index. Returns: A list of `RequestOutput` objects containing the From 3c80f3db33879c9974e8ce7bd87800b5ae26b9d1 Mon Sep 17 00:00:00 2001 From: maang Date: Thu, 27 Nov 2025 12:13:38 +0800 Subject: [PATCH 2/3] [docs] Indicate the type of list element Signed-off-by: maang --- vllm/entrypoints/llm.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/vllm/entrypoints/llm.py b/vllm/entrypoints/llm.py index 9cd7219dee22..63a09d6a45cf 100644 --- a/vllm/entrypoints/llm.py +++ b/vllm/entrypoints/llm.py @@ -405,8 +405,9 @@ def generate( lora_request: LoRA request to use for generation, if any. priority: The priority of the requests, if any. Only applicable when priority scheduling policy is enabled. - If provided, must be a list matching the length of `prompts`, - where each priority value corresponds to the prompt at the same index. + If provided, must be a list of integers matching the length + of `prompts`,where each priority value corresponds to the prompt + at the same index. 
Returns: A list of `RequestOutput` objects containing the From 52707cca5a0685d210ce3376f2b95d1e9c77af0b Mon Sep 17 00:00:00 2001 From: maang-h <55082429+maang-h@users.noreply.github.com> Date: Thu, 27 Nov 2025 17:54:18 +0800 Subject: [PATCH 3/3] Update vllm/entrypoints/llm.py Co-authored-by: Cyrus Leung Signed-off-by: maang-h <55082429+maang-h@users.noreply.github.com> --- vllm/entrypoints/llm.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vllm/entrypoints/llm.py b/vllm/entrypoints/llm.py index 63a09d6a45cf..f6ee74678998 100644 --- a/vllm/entrypoints/llm.py +++ b/vllm/entrypoints/llm.py @@ -406,7 +406,7 @@ def generate( priority: The priority of the requests, if any. Only applicable when priority scheduling policy is enabled. If provided, must be a list of integers matching the length - of `prompts`,where each priority value corresponds to the prompt + of `prompts`, where each priority value corresponds to the prompt at the same index. Returns: