fix(llm): build to include openllm_client

Signed-off-by: aarnphm-ec2-dev <29749331+aarnphm@users.noreply.github.com>
author aarnphm-ec2-dev
date   2023-06-08 19:19:50 +00:00
parent e6dd1b1c39
commit f84b975a55
2 changed files with 5 additions and 1 deletion

pyproject.toml

@@ -88,6 +88,9 @@ path = "src/openllm/__about__.py"
 [tool.hatch.metadata]
 allow-direct-references = true
+[tool.hatch.build.targets.wheel]
+packages = ["src/openllm", "src/openllm_client"]
 [tool.hatch.envs.default]
 dependencies = [
   "coverage[toml]>=6.5",

@@ -26,6 +26,7 @@ from abc import ABC, ABCMeta, abstractmethod
 import bentoml
 import inflection
+import orjson
 from bentoml.types import ModelSignature, ModelSignatureDict
 import openllm
@@ -486,7 +487,7 @@ class LLM(LLMInterface, metaclass=LLMMetaclass):
     @property
     def identifying_params(self) -> dict[str, t.Any]:
-        return {"configuration": self.config.model_dump(), "pretrained": self.pretrained}
+        return {"configuration": self.config.model_dump_json(), "pretrained": orjson.dumps(self.pretrained).decode()}

     @t.overload
     def make_tag(
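
The updated identifying_params returns plain JSON strings for both entries: the config is serialized with a Pydantic-style model_dump_json() instead of model_dump(), and the pretrained value goes through orjson.dumps(...).decode() instead of being returned as a raw Python object, which keeps the whole dict trivially serializable. A minimal sketch of the same pattern, where SampleConfig and the pretrained value are stand-ins rather than the real openllm classes:

# Sketch of the serialization pattern above; SampleConfig is a hypothetical
# stand-in for the actual openllm config class, not part of this diff.
import orjson
import pydantic


class SampleConfig(pydantic.BaseModel):
    temperature: float = 0.7
    max_new_tokens: int = 128


config = SampleConfig()
pretrained = "facebook/opt-125m"  # example identifier, not taken from the diff

identifying_params = {
    # model_dump_json() yields a JSON string rather than a nested dict
    "configuration": config.model_dump_json(),
    # orjson.dumps() returns bytes, so .decode() converts it to str
    "pretrained": orjson.dumps(pretrained).decode(),
}
print(identifying_params)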