[build-system]
build-backend = "hatchling.build"
requires = [
  "hatchling==1.18.0",
  "hatch-vcs==0.4.0",
  "hatch-fancy-pypi-readme==23.1.0",
]

[project]
authors = [
  { name = "Aaron Pham", email = "aarnphm@bentoml.com" },
  { name = "BentoML Team", email = "contact@bentoml.com" },
]
# readme is assembled by hatch-fancy-pypi-readme; version comes from hatch-vcs.
dynamic = ["readme", "version"]
classifiers = [
  "Development Status :: 4 - Beta",
  "License :: OSI Approved :: Apache Software License",
  "Topic :: Scientific/Engineering",
  "Topic :: Software Development :: Libraries",
  "Operating System :: OS Independent",
  "Intended Audience :: Developers",
  "Intended Audience :: Science/Research",
  "Intended Audience :: System Administrators",
  "Typing :: Typed",
  "Programming Language :: Python",
  "Programming Language :: Python :: 3",
  "Programming Language :: Python :: 3 :: Only",
  "Programming Language :: Python :: 3.8",
  "Programming Language :: Python :: 3.9",
  "Programming Language :: Python :: 3.10",
  "Programming Language :: Python :: 3.11",
  "Programming Language :: Python :: Implementation :: CPython",
  "Programming Language :: Python :: Implementation :: PyPy",
]
description = "OpenLLM Client: Interacting with OpenLLM HTTP/gRPC server, or any BentoML server."
keywords = [
  "MLOps",
  "AI",
  "BentoML",
  "Model Serving",
  "Model Deployment",
  "LLMOps",
  "Falcon",
  "Vicuna",
  "Llama 2",
  "Fine tuning",
  "Serverless",
  "Large Language Model",
  "Generative AI",
  "StableLM",
  "Alpaca",
  "PyTorch",
  "Transformers",
]
dependencies = ["anyio", "distro", "httpx", "openllm-core"]
license = "Apache-2.0"
name = "openllm-client"
requires-python = ">=3.8"

[project.urls]
Blog = "https://modelserving.com"
Chat = "https://l.bentoml.com/join-openllm-discord"
Documentation = "https://github.com/bentoml/OpenLLM/blob/main/openllm-client/README.md"
GitHub = "https://github.com/bentoml/OpenLLM/blob/main/openllm-client"
History = "https://github.com/bentoml/OpenLLM/blob/main/CHANGELOG.md"
Homepage = "https://bentoml.com"
Tracker = "https://github.com/bentoml/OpenLLM/issues"
Twitter = "https://twitter.com/bentomlai"

[project.optional-dependencies]
agents = ["diffusers", "soundfile", "transformers[agents]>=4.30"]
auth = ["httpx_auth"]
# "full" aggregates the other extras of this same package.
full = ["openllm-client[grpc,agents,auth]"]
grpc = ["bentoml[grpc]>=1.1.11,<1.2"]

[tool.hatch.version]
fallback-version = "0.0.0"
source = "vcs"

[tool.hatch.build.hooks.vcs]
version-file = "src/openllm_client/_version.py"

[tool.hatch.version.raw-options]
git_describe_command = [
  "git",
  "describe",
  "--dirty",
  "--tags",
  "--long",
  "--first-parent",
]
local_scheme = "no-local-version"
# The VCS root is the monorepo root, one level above this package.
root = ".."

[tool.hatch.metadata]
allow-direct-references = true

[tool.hatch.build.targets.wheel]
only-include = ["src/openllm_client"]
sources = ["src"]

[tool.hatch.build.targets.sdist]
exclude = [
  "/.git_archival.txt",
  "tests",
  "/.python-version-default",
  "/generate-grpc-stubs",
  "/dev.Dockerfile",
]

# The PyPI readme is stitched together from static fragments and README.md slices.
[tool.hatch.metadata.hooks.fancy-pypi-readme]
content-type = "text/markdown"

# PyPI doesn't support the tag.
# NOTE(review): the tag name (and the surrounding HTML in the fragments below)
# appears to have been stripped from this file — confirm against upstream.
[[tool.hatch.metadata.hooks.fancy-pypi-readme.fragments]]
text = """

Banner for OpenLLM

"""

# NOTE(review): start-after/end-before anchors of "\n" look like stripped HTML
# comment markers — verify these still select the intended README.md region.
[[tool.hatch.metadata.hooks.fancy-pypi-readme.fragments]]
end-before = "\n"
path = "README.md"
start-after = "\n"

[[tool.hatch.metadata.hooks.fancy-pypi-readme.fragments]]
text = """

Gif showing OpenLLM Intro

"""

[[tool.hatch.metadata.hooks.fancy-pypi-readme.fragments]]
text = """

Gif showing Agent integration

"""

[[tool.hatch.metadata.hooks.fancy-pypi-readme.fragments]]
end-before = "\n"
path = "README.md"
start-after = "\n"

# Trailing changelog link; the "---" horizontal rule must sit on its own line
# to render as markdown.
[[tool.hatch.metadata.hooks.fancy-pypi-readme.fragments]]
text = """
---

[Click me for full changelog](https://github.com/bentoml/openllm/blob/main/CHANGELOG.md)
"""