Mirror of https://github.com/bentoml/OpenLLM.git, synced 2026-01-30 18:32:18 -05:00.
fix(build): correct module type for stubs and strip assert [skip ci]
Signed-off-by: Aaron <29749331+aarnphm@users.noreply.github.com>
@@ -44,8 +44,8 @@ text = """
"""
[[tool.hatch.metadata.hooks.fancy-pypi-readme.fragments]]
path = "CHANGELOG.md"
start-after = "<!-- towncrier release notes start -->"
pattern = "\n(###.+?\n)## "
start-after = "<!-- towncrier release notes start -->"
[[metadata.hooks.fancy-pypi-readme.fragments]]
text = """
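For context, the fragment table above drives hatch-fancy-pypi-readme: it splices a slice of CHANGELOG.md into the package's long description, starting after the towncrier marker and keeping whatever the regex captures. A rough Python sketch of that extraction, as an approximation of the plugin's behavior rather than its actual implementation:

    # Approximate what a `path`/`start-after`/`pattern` fragment does; the real logic
    # lives in hatch-fancy-pypi-readme, this is only an illustrative stand-in.
    import re

    def extract_fragment(changelog: str) -> str:
        marker = "<!-- towncrier release notes start -->"
        _, _, tail = changelog.partition(marker)            # start-after
        match = re.search(r"\n(###.+?\n)## ", tail, re.S)   # pattern (first capture group wins)
        return match.group(1) if match else tail

    sample = (
        "<!-- towncrier release notes start -->\n"
        "### Features\n"
        "- new backend\n"
        "## v0.2.0\n"
    )
    print(extract_fragment(sample))  # -> "### Features\n- new backend\n"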
@@ -139,7 +139,8 @@ write-summary-report = "python tools/write-coverage-report.py"
[build.targets.wheel.hooks.mypyc]
dependencies = [
"hatch-mypyc>=0.14.1",
"click>=8.1.6",
"mypy==1.4.1",
"click==8.1.3", # avoid https://github.com/pallets/click/issues/2558
"peft",
"git+https://github.com/bentoml/BentoML.git@main",
"git+https://github.com/huggingface/transformers.git@main",
@@ -150,5 +151,6 @@ dependencies = [
]
enable-by-default = false
mypy-args = ["--no-warn-unused-ignores"]
options = { debug_level = "0", strip_asserts = true }
require-runtime-dependencies = true
require-runtime-features = ["agents", "chatglm", "opt"]
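The `strip_asserts = true` option in the mypyc hook is what motivates the source change further down: once asserts are stripped from the compiled extension, they no longer guard anything at runtime. CPython's -O mode has the same effect, so a minimal sketch (my example, not OpenLLM code) can show the failure mode:

    # Run once normally and once as `python -O guard_demo.py`.
    # Under -O (analogous to mypyc's strip_asserts), __debug__ is False and the
    # assert below is removed, so the guard silently lets None through.
    def guard_with_assert(history):
        assert history is not None, "history must be provided"
        return history

    if __name__ == "__main__":
        print("asserts active:", __debug__)
        try:
            print("guard returned:", guard_with_assert(None))
        except AssertionError as exc:
            print("assert fired:", exc)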
@@ -384,6 +384,7 @@ pretty = true
python_version = "3.11"
show_error_codes = true
strict = true
warn_no_return = false
warn_return_any = false
warn_unreachable = true
warn_unused_ignores = true
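As an illustration of one of these flags (my own example, not from the repo), `warn_unreachable = true` makes mypy report code it can prove never runs, such as a fallback after exhaustive narrowing:

    # With warn_unreachable = true, mypy flags the final return: after both
    # isinstance checks, `value` is narrowed to Never, so the statement can't run.
    def describe(value: int | str) -> str:
        if isinstance(value, int):
            return "an int"
        if isinstance(value, str):
            return "a str"
        return "something else"  # error: Statement is unreachable  [unreachable]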
@@ -398,8 +399,6 @@ module = [
"inflection.*",
"huggingface_hub.*",
"peft.*",
"auto_gptq.*",
"vllm.*",
"orjson.*",
"git.*",
"httpx.*",
@@ -37,7 +37,7 @@ class ChatGLM(openllm.LLM["transformers.PreTrainedModel", "transformers.PreTrain
def postprocess_generate(self, prompt: str, generation_result: tuple[str, list[tuple[str, str]]], *, chat_history: list[tuple[str, str]] | None = None, **attrs: t.Any):
generated, history = generation_result
if self.config.retain_history:
assert chat_history is not None, "'retain_history' is True while there is no history provided."
if chat_history is None: raise ValueError("'retain_history' is True while there is no history provided.")
chat_history.extend(history)
return generated
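The explicit raise that replaces the assert survives both -O and mypyc's strip_asserts, which is the point of the ChatGLM change above. A simplified, standalone sketch of the corrected guard (only the control flow; the real method lives on the openllm ChatGLM class):

    def postprocess_generate(generation_result, retain_history, chat_history=None):
        generated, history = generation_result
        if retain_history:
            # explicit check: still enforced after asserts are stripped
            if chat_history is None:
                raise ValueError("'retain_history' is True while there is no history provided.")
            chat_history.extend(history)
        return generated

    # usage: the caller owns the running history and passes it in by reference
    running_history: list[tuple[str, str]] = []
    reply = postprocess_generate(("hi there", [("hello", "hi there")]), True, running_history)
    print(reply)            # hi there
    print(running_history)  # [('hello', 'hi there')]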