diff --git a/hatch.toml b/hatch.toml
index bb7459b7..5e6bf91c 100644
--- a/hatch.toml
+++ b/hatch.toml
@@ -44,8 +44,8 @@ text = """
 """
 [[metadata.hooks.fancy-pypi-readme.fragments]]
 path = "CHANGELOG.md"
-start-after = ""
 pattern = "\n(###.+?\n)## "
+start-after = ""
 
 [[metadata.hooks.fancy-pypi-readme.fragments]]
 text = """
@@ -139,7 +139,8 @@ write-summary-report = "python tools/write-coverage-report.py"
 [build.targets.wheel.hooks.mypyc]
 dependencies = [
   "hatch-mypyc>=0.14.1",
-  "click>=8.1.6",
+  "mypy==1.4.1",
+  "click==8.1.3", # avoid https://github.com/pallets/click/issues/2558
   "peft",
   "git+https://github.com/bentoml/BentoML.git@main",
   "git+https://github.com/huggingface/transformers.git@main",
@@ -150,5 +151,6 @@ dependencies = [
 ]
 enable-by-default = false
 mypy-args = ["--no-warn-unused-ignores"]
+options = { debug_level = "0", strip_asserts = true }
 require-runtime-dependencies = true
 require-runtime-features = ["agents", "chatglm", "opt"]
diff --git a/pyproject.toml b/pyproject.toml
index 12c7615d..6e42aded 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -384,6 +384,7 @@ pretty = true
 python_version = "3.11"
 show_error_codes = true
 strict = true
+warn_no_return = false
 warn_return_any = false
 warn_unreachable = true
 warn_unused_ignores = true
@@ -398,8 +399,6 @@ module = [
   "inflection.*",
   "huggingface_hub.*",
   "peft.*",
-  "auto_gptq.*",
-  "vllm.*",
   "orjson.*",
   "git.*",
   "httpx.*",
diff --git a/src/openllm/models/chatglm/modeling_chatglm.py b/src/openllm/models/chatglm/modeling_chatglm.py
index ed422852..cd335465 100644
--- a/src/openllm/models/chatglm/modeling_chatglm.py
+++ b/src/openllm/models/chatglm/modeling_chatglm.py
@@ -37,7 +37,7 @@ class ChatGLM(openllm.LLM["transformers.PreTrainedModel", "transformers.PreTrain
   def postprocess_generate(self, prompt: str, generation_result: tuple[str, list[tuple[str, str]]], *, chat_history: list[tuple[str, str]] | None = None, **attrs: t.Any):
     generated, history = generation_result
     if self.config.retain_history:
-      assert chat_history is not None, "'retain_history' is True while there is no history provided."
+      if chat_history is None: raise ValueError("'retain_history' is True while there is no history provided.")
       chat_history.extend(history)
     return generated
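
Note on the last hunk: the assert is replaced with an explicit raise because the same patch turns on strip_asserts = true for the mypyc build hook, and mypyc removes assert statements from the compiled extension (just as python -O does for bytecode), which would silently drop the history check. A minimal sketch of the difference follows; the function names are hypothetical and not part of the patch:

from __future__ import annotations

History = list[tuple[str, str]]

def validate_with_assert(history: History | None) -> History:
  # Compiled out under mypyc's strip_asserts (or `python -O`): a None history
  # slips through and only fails later, e.g. as AttributeError on .extend().
  assert history is not None, "'retain_history' is True while there is no history provided."
  return history

def validate_with_raise(history: History | None) -> History:
  # Survives stripping: callers always get a clear ValueError at the boundary.
  if history is None: raise ValueError("'retain_history' is True while there is no history provided.")
  return history

With the check expressed as a raise, enabling strip_asserts (alongside debug_level = "0") remains a pure size/speed optimization with no behavior change.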