mirror of
https://github.com/bentoml/OpenLLM.git
synced 2026-02-19 07:06:02 -05:00
feat(openai): chat templates and complete control of prompt generation (#725)
* feat(openai): chat templates and complete control of prompt generation

  Signed-off-by: Aaron Pham <29749331+aarnphm@users.noreply.github.com>

* fix: correctly use base chat templates

  Signed-off-by: Aaron Pham <29749331+aarnphm@users.noreply.github.com>

* fix: remove symlink

  Signed-off-by: Aaron Pham <29749331+aarnphm@users.noreply.github.com>

---------

Signed-off-by: Aaron Pham <29749331+aarnphm@users.noreply.github.com>
This commit is contained in:
@@ -62,6 +62,9 @@ class ChatCompletionRequest:
|
||||
# supported by vLLM and us
|
||||
top_k: t.Optional[int] = attr.field(default=None)
|
||||
best_of: t.Optional[int] = attr.field(default=1)
|
||||
# Additional features to support chat_template
|
||||
chat_template: str = attr.field(default=None)
|
||||
add_generation_prompt: bool = attr.field(default=True)
|
||||
|
||||
|
||||
@attr.define
|
||||
|
||||
Reference in New Issue
Block a user