Mirror of https://github.com/bentoml/OpenLLM.git (synced 2026-03-06 16:16:37 -05:00).
Commit: "refactor: packages (#249)". This commit is contained in:
from __future__ import annotations

# StarCoder sub-package entry point (openllm).
#
# NOTE(review): this span was recovered from a garbled git-diff rendering that
# interleaved the old and new sides of hunk `@@ -2,14 +2,13 @@` with gutter
# residue. The reconstruction below follows the post-refactor ("packages",
# #249) side: configuration symbols are imported eagerly from `openllm_core`
# (re-exported via the `X as X` alias idiom so type checkers treat them as
# public), and the lazy-import structure starts empty. The pre-refactor
# `_import_structure = {"configuration_starcoder": [...]}` mapping and the
# relative `TYPE_CHECKING` imports from `.configuration_starcoder` were the
# deleted side of the hunk and are intentionally omitted — confirm against
# the upstream commit.
import sys, typing as t

from openllm.exceptions import MissingDependencyError
from openllm.utils import LazyModule, is_torch_available, is_vllm_available
from openllm_core.config.configuration_starcoder import (
  DEFAULT_PROMPT_TEMPLATE as DEFAULT_PROMPT_TEMPLATE,
  START_STARCODER_COMMAND_DOCSTRING as START_STARCODER_COMMAND_DOCSTRING,
  StarCoderConfig as StarCoderConfig,
)

# Maps sub-module name -> list of attributes to expose lazily via LazyModule.
# Populated conditionally below so importing this package never hard-requires
# the optional backends.
_import_structure: dict[str, list[str]] = {}

try:
  # Gate the torch-backed modeling module on torch being installed; the
  # sentinel exception keeps the control flow uniform with sibling packages.
  if not is_torch_available(): raise MissingDependencyError
except MissingDependencyError: pass  # torch absent: leave the lazy structure without the torch entries
Reference in New Issue
Block a user