OpenLLM/WORKSPACE
Chaoyu dd8b6050b2 feat: FLAN-T5 support
- Add infrastructure; still to be implemented: cache, chat history

- Base Runnable implementation that fits the LangChain API

- Add a Prompt descriptor and utilities.

feat: license headers, auto factory implementation, and CLI

Auto-construct arguments from the pydantic config

Add an auto factory for ease of use

Only provide `/generate` to streamline the user experience

Configuration contract: CLI > envvar > input

fix: serve from a thread

Fix CLI args

chore: clean up names and refactor imports

Signed-off-by: Aaron <29749331+aarnphm@users.noreply.github.com>
2023-05-03 17:50:14 -07:00


# TODO: Migrate to bzlmod once 6.0.0 is released.
workspace(name = "com_github_bentoml_bentoml")
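# External dependencies declared in //rules:deps.bzl.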
load("//rules:deps.bzl", "bentoml_dependencies")
bentoml_dependencies()
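# Dependencies required by the BentoML plugins workspace (@com_github_bentoml_plugins).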
load("@com_github_bentoml_plugins//rules:deps.bzl", "plugins_dependencies")
plugins_dependencies()
# NOTE: external users who wish to use the BentoML workspace setup
# should always load these macros in this order.
load("@com_github_bentoml_plugins//rules:workspace0.bzl", "workspace0")
workspace0()
load("@com_github_bentoml_plugins//rules:workspace1.bzl", "workspace1")
workspace1()
load("@com_github_bentoml_plugins//rules:workspace2.bzl", "workspace2")
workspace2()
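# Pin Python packages from the lockfiles below. pip_parse() only parses each
# lockfile into an external repository; the actual package repositories are
# created by the install_deps() calls further down.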
load("@rules_python//python:pip.bzl", "pip_parse")
pip_parse(
    name = "pypi",
    requirements = "//requirements:bazel-requirements.lock.txt",
)
pip_parse(
    name = "tensorflow",
    requirements = "//requirements:bazel-tensorflow-requirements.lock.txt",
)
pip_parse(
    name = "tests",
    requirements = "//requirements:bazel-tests-requirements.lock.txt",
)
load("//rules/py/vendorred:pypi.bzl", pypi_deps = "install_deps")
pypi_deps()
load("//rules/py/vendorred:tests.bzl", tests_deps = "install_deps")
tests_deps()
load("//rules/py/vendorred:tensorflow.bzl", tensorflow_deps = "install_deps")
tensorflow_deps()
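
# Illustrative usage (not part of this WORKSPACE, and the exact load path may
# differ since this project vendors the generated files): a BUILD file can
# depend on a pinned package through rules_python's requirement() helper, e.g.
#
#   load("@pypi//:requirements.bzl", "requirement")
#
#   py_library(
#       name = "example",  # hypothetical target
#       srcs = ["example.py"],
#       deps = [requirement("pydantic")],  # assumes "pydantic" is pinned in the lockfile
#   )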