
fix: `prompt_logprobs==0` case

AlpinDale 7 months ago
parent
commit
e321d80e4e

+ 1 - 1
aphrodite/endpoints/openai/serving_completions.py

@@ -307,7 +307,7 @@ class OpenAIServingCompletion(OpenAIServing):
                 elif request.echo and request.max_tokens > 0:
                     token_ids = prompt_token_ids + output.token_ids
                     top_logprobs = (prompt_logprobs + output.logprobs
-                                    if request.logprobs else None)
+                                    if request.logprobs is not None else None)
                     output_text = prompt_text + output.text
                 else:
                     token_ids = output.token_ids

+ 1 - 1
aphrodite/modeling/sampling_metadata.py

@@ -232,7 +232,7 @@ def _prepare_seq_groups(
         logits = hidden_states[selected_token_indices]
         """
 
-        if sampling_params.prompt_logprobs:
+        if sampling_params.prompt_logprobs is not None:
             selected_token_indices.extend(
                 range(model_output_idx, model_output_idx + prompt_logprob_len))
         model_output_idx += prompt_logprob_len

+ 1 - 1
aphrodite/task_handler/model_runner.py

@@ -447,7 +447,7 @@ class ModelRunner:
                     [lora_id] *
                     (query_len if seq_group_metadata.sampling_params
                      and seq_group_metadata.sampling_params.prompt_logprobs
-                     else 1))
+                     is not None else 1))
 
                 mm_data = seq_group_metadata.multi_modal_data
                 if mm_data is not None:
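All three hunks apply the same fix: a Python truthiness check (`if request.logprobs`, `if sampling_params.prompt_logprobs`) is replaced with an explicit `is not None` comparison, so that a request with `prompt_logprobs == 0` (or `logprobs == 0`) is no longer treated the same as a request that omits the parameter. Below is a minimal standalone sketch of the pitfall; the function names are illustrative and not taken from the repository.

```python
# Sketch of the truthiness pitfall this commit fixes: 0 is a valid,
# explicitly requested value, but it is falsy in Python.

def should_return_logprobs_buggy(logprobs):
    # Old behavior: `if logprobs:` treats 0 the same as None,
    # so a request with logprobs=0 silently gets no logprobs back.
    return bool(logprobs)

def should_return_logprobs_fixed(logprobs):
    # Fixed behavior: only an absent value (None) disables logprobs.
    return logprobs is not None

for value in (None, 0, 5):
    print(value,
          should_return_logprobs_buggy(value),
          should_return_logprobs_fixed(value))
# None  False  False
# 0     False  True   <- the case the commit title refers to
# 5     True   True
```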