Files
OpenLLM/openllm-python/tests/models_test.py
Aaron Pham 7438005c04 refactor(config): simplify configuration and update start CLI output (#611)
* chore(config): simplify configuration and update start CLI output
handling

Signed-off-by: Aaron <29749331+aarnphm@users.noreply.github.com>

* chore: remove state and message sent after server lifecycle

Signed-off-by: Aaron <29749331+aarnphm@users.noreply.github.com>

* chore: update color stream and refactor reusable logic

Signed-off-by: Aaron <29749331+aarnphm@users.noreply.github.com>

* chore: update documentation and mypy

Signed-off-by: Aaron <29749331+aarnphm@users.noreply.github.com>

---------

Signed-off-by: Aaron <29749331+aarnphm@users.noreply.github.com>
2023-11-11 22:36:10 -05:00

31 lines
902 B
Python

from __future__ import annotations
import os
import typing as t
import pytest
if t.TYPE_CHECKING:
import openllm
@pytest.mark.skipif('GITHUB_ACTIONS' in os.environ, reason='Model is too large for CI')
def test_flan_t5_implementation(prompt: str, llm: openllm.LLM[t.Any, t.Any]):
  """Smoke-test flan-t5 generation: both default and custom sampling must yield output."""
  # Default sampling parameters should produce a truthy (non-empty) result.
  default_output = llm.generate(prompt)
  assert default_output
  # Custom temperature/top_p should also produce output.
  sampled_output = llm.generate(prompt, temperature=0.8, top_p=0.23)
  assert sampled_output
@pytest.mark.skipif('GITHUB_ACTIONS' in os.environ, reason='Model is too large for CI')
def test_opt_implementation(prompt: str, llm: openllm.LLM[t.Any, t.Any]):
  """Smoke-test OPT generation: both default and custom sampling must yield output."""
  # Default sampling parameters should produce a truthy (non-empty) result.
  default_output = llm.generate(prompt)
  assert default_output
  # Custom temperature/top_k should also produce output.
  sampled_output = llm.generate(prompt, temperature=0.9, top_k=8)
  assert sampled_output
@pytest.mark.skipif('GITHUB_ACTIONS' in os.environ, reason='Model is too large for CI')
def test_baichuan_implementation(prompt: str, llm: openllm.LLM[t.Any, t.Any]):
  """Smoke-test Baichuan generation: both default and custom sampling must yield output."""
  # Default sampling parameters should produce a truthy (non-empty) result.
  default_output = llm.generate(prompt)
  assert default_output
  # A custom temperature should also produce output.
  sampled_output = llm.generate(prompt, temperature=0.95)
  assert sampled_output