chore: clean up unused code path (#633)

We now rely on tokenizer.chat_template to format prompts correctly.
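
For context, a minimal sketch (not this repository's code) of the tokenizer-side formatting we now rely on, assuming a transformers tokenizer that ships a chat template; the model name is illustrative:

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained('HuggingFaceH4/zephyr-7b-beta')  # illustrative model
messages = [
  {'role': 'system', 'content': 'You are a helpful assistant.'},
  {'role': 'user', 'content': 'What does OpenLLM do?'},
]
# add_generation_prompt=True opens the assistant turn, mirroring the blank
# assistant message the deleted helper appended manually.
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)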

Signed-off-by: Aaron <29749331+aarnphm@users.noreply.github.com>
commit a6387d1d15 (parent 67ee492715)
Author: Aaron Pham
Date:   2023-11-13 17:23:07 -05:00
Committed by: GitHub

16 changed files with 223 additions and 242 deletions


@@ -173,20 +173,3 @@ class ModelCard:
 class ModelList:
   object: str = 'list'
   data: t.List[ModelCard] = attr.field(factory=list)
-async def get_conversation_prompt(request: ChatCompletionRequest, llm_config: openllm_core.LLMConfig) -> str:
-  conv = llm_config.get_conversation_template()
-  for message in request.messages:
-    msg_role = message['role']
-    if msg_role == 'system':
-      conv.set_system_message(message['content'])
-    elif msg_role == 'user':
-      conv.append_message(conv.roles[0], message['content'])
-    elif msg_role == 'assistant':
-      conv.append_message(conv.roles[1], message['content'])
-    else:
-      raise ValueError(f'Unknown role: {msg_role}')
-  # Add a blank message for the assistant.
-  conv.append_message(conv.roles[1], '')
-  return conv.get_prompt()
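
For comparison, a hedged sketch of what a drop-in replacement for the deleted helper could look like under the chat-template approach; the function name and the tokenizer parameter are hypothetical, not this repository's API:

import typing as t

async def render_chat_prompt(messages: t.List[t.Dict[str, str]], tokenizer) -> str:
  # request.messages already holds dicts with 'role' and 'content' keys, as
  # the deleted code shows, so they can be passed straight through. Role
  # validation is delegated to the tokenizer's chat template, replacing the
  # manual dispatch and ValueError above.
  return tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)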