[Bugfix] chat method add_generation_prompt param (#7734)
parent 9b73a2f498
commit d3c002eadc
@@ -353,7 +353,7 @@ class LLM:
         use_tqdm: bool = True,
         lora_request: Optional[LoRARequest] = None,
         chat_template: Optional[str] = None,
-        add_generation_template: bool = True,
+        add_generation_prompt: bool = True,
     ) -> List[RequestOutput]:
         """
         Generates responses for chat messages.
@@ -374,7 +374,7 @@ class LLM:
             lora_request: LoRA request to use for generation, if any.
             chat_template: The template to use for structuring the chat.
                 If not provided, the model's default chat template will be used.
-            add_generation_template: If True, adds a generation template
+            add_generation_prompt: If True, adds a generation template
                 to each message.

         Returns:
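
For context, the renamed parameter mirrors the add_generation_prompt argument of Hugging Face's tokenizer.apply_chat_template, which appends the assistant-turn header so the model knows to begin a reply. A minimal sketch of that behavior, assuming a placeholder chat model (the checkpoint name is an illustration, not part of this change):

# Sketch of what add_generation_prompt controls at the tokenizer level.
# The model checkpoint below is a placeholder assumption.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("meta-llama/Meta-Llama-3-8B-Instruct")
messages = [{"role": "user", "content": "Hello!"}]

# With add_generation_prompt=True, the rendered prompt ends with the
# assistant header, cueing the model to start its response.
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True)
print(prompt)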
@@ -392,7 +392,7 @@ class LLM:
             tokenizer,
             conversations,
             chat_template=chat_template,
-            add_generation_template=add_generation_template)
+            add_generation_prompt=add_generation_prompt)

         return self.generate(
             prompts,
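
With the rename in place, the keyword a caller passes to LLM.chat lines up with the one forwarded to apply_chat_template. A minimal usage sketch, assuming a placeholder model name (not part of this diff):

# Sketch of calling LLM.chat with the corrected keyword. The checkpoint
# is a placeholder assumption; any chat-tuned model would do.
from vllm import LLM

llm = LLM(model="meta-llama/Meta-Llama-3-8B-Instruct")
messages = [{"role": "user", "content": "What is the capital of France?"}]

# add_generation_prompt now matches the keyword forwarded to
# apply_chat_template end to end, per the hunks above.
outputs = llm.chat(messages, add_generation_prompt=True)
print(outputs[0].outputs[0].text)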