From f89bec261c96ec31b0af3708345bfe70d391b0c5 Mon Sep 17 00:00:00 2001 From: Aaron Pham <29749331+aarnphm@users.noreply.github.com> Date: Fri, 10 Nov 2023 03:32:12 -0500 Subject: [PATCH] fix: correct import modules locally (#601) Signed-off-by: Aaron <29749331+aarnphm@users.noreply.github.com> --- openllm-python/src/openllm/serialisation/__init__.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/openllm-python/src/openllm/serialisation/__init__.py b/openllm-python/src/openllm/serialisation/__init__.py index ab9d9858..76e6922f 100644 --- a/openllm-python/src/openllm/serialisation/__init__.py +++ b/openllm-python/src/openllm/serialisation/__init__.py @@ -1,6 +1,5 @@ from __future__ import annotations import importlib -import typing as t import cloudpickle import fs @@ -10,11 +9,6 @@ import openllm from openllm_core._typing_compat import ParamSpec -if t.TYPE_CHECKING: - import transformers as transformers -else: - transformers = openllm.utils.LazyLoader('transformers', globals(), 'transformers') - P = ParamSpec('P') @@ -24,6 +18,8 @@ def load_tokenizer(llm, **tokenizer_attrs): By default, it will try to find the bentomodel whether it is in store.. If model is not found, it will raises a ``bentoml.exceptions.NotFound``. """ + from transformers import AutoTokenizer + tokenizer_attrs = {**llm.llm_parameters[-1], **tokenizer_attrs} from bentoml._internal.models.model import CUSTOM_OBJECTS_FILENAME @@ -42,7 +38,7 @@ def load_tokenizer(llm, **tokenizer_attrs): 'For example: "bentoml.transformers.save_model(..., custom_objects={\'tokenizer\': tokenizer})"' ) from None else: - tokenizer = transformers.AutoTokenizer.from_pretrained( + tokenizer = AutoTokenizer.from_pretrained( bentomodel_fs.getsyspath('/'), trust_remote_code=llm.trust_remote_code, **tokenizer_attrs )