diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 009a9e95..5117698f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -44,6 +44,7 @@ repos: - 'markdown' - 'toml' - 'json' + - 'text' exclude: | (?x)^( tools/.*| diff --git a/README.md b/README.md index 6364acd5..650bd1ee 100644 --- a/README.md +++ b/README.md @@ -346,8 +346,9 @@ async def prompt(input_text: str) -> str: OpenLLM seamlessly integrates with HuggingFace Agents. > **Warning** The HuggingFace Agent is still at experimental stage. It is -> recommended to OpenLLM with `pip install 'openllm[nightly]'` to get the latest -> API update for HuggingFace agent. +> recommended to install OpenLLM with +> `pip install -r nightly-requirements.generated.txt` to get the latest API +> update for HuggingFace agent. ```python import transformers diff --git a/nightly-requirements.generated.txt b/nightly-requirements.generated.txt new file mode 100644 index 00000000..76201d8c --- /dev/null +++ b/nightly-requirements.generated.txt @@ -0,0 +1,9 @@ +# This file is generated by `./tools/update-optional-dependencies.py` +# DO NOT EDIT +-e . 
+bentoml[grpc,io] @ git+https://github.com/bentoml/bentoml.git@main +peft @ git+https://github.com/huggingface/peft.git@main +transformers[torch,tokenizers,accelerate] @ git+https://github.com/huggingface/transformers.git@main +optimum @ git+https://github.com/huggingface/optimum.git@main +accelerate @ git+https://github.com/huggingface/accelerate.git@main +bitsandbytes @ git+https://github.com/TimDettmers/bitsandbytes.git@main diff --git a/pyproject.toml b/pyproject.toml index d42d9112..9508c386 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -63,23 +63,15 @@ all = [ "openllm[chatglm]", "openllm[starcoder]", "openllm[falcon]", - "openllm[flan-t5]", "openllm[agents]", "openllm[fine-tune]", "openllm[openai]", + "openllm[flan-t5]", ] chatglm = ["cpm_kernels", "sentencepiece"] falcon = ["einops", "xformers", "safetensors"] fine-tune = ["peft", "bitsandbytes", "datasets", "accelerate", "deepspeed"] flan-t5 = ["flax", "jax", "jaxlib", "tensorflow", "keras"] -nightly = [ - "bentoml[grpc,io] @ git+https://github.com/bentoml/bentoml.git@main", - "peft @ git+https://github.com/huggingface/peft.git@main", - "transformers[torch,tokenizers,accelerate] @ git+https://github.com/huggingface/transformers.git@main", - "optimum @ git+https://github.com/huggingface/optimum.git@main", - "accelerate @ git+https://github.com/huggingface/accelerate.git@main", - "bitsandbytes @ git+https://github.com/TimDettmers/bitsandbytes.git@main", -] openai = ["openai", "tiktoken"] starcoder = ["bitsandbytes"] diff --git a/tools/update-optional-dependencies.py b/tools/update-optional-dependencies.py index 2a229bdc..30c2d7b3 100755 --- a/tools/update-optional-dependencies.py +++ b/tools/update-optional-dependencies.py @@ -87,7 +87,6 @@ _NIGHTLY_MAPPING: dict[str, Dependencies] = { "bitsandbytes": Dependencies.from_tuple("bitsandbytes", "TimDettmers/bitsandbytes", "main", None), } -NIGHTLY_DEPS = [v.to_str() for v in _NIGHTLY_MAPPING.values()] FINE_TUNE_DEPS = ["peft", "bitsandbytes", "datasets", 
"accelerate", "deepspeed"] FLAN_T5_DEPS = ["flax", "jax", "jaxlib", "tensorflow", "keras"] OPENAI_DEPS = ["openai", "tiktoken"] @@ -117,8 +116,7 @@ def main() -> int: for name, config in _base_requirements.items(): table.add(name, config) - # ignore nightly for all - table.add("all", [f"openllm[{k}]" for k in table.keys() if k != "nightly"]) + table.add("all", [f"openllm[{k}]" for k in table.keys()]) pyproject["project"]["optional-dependencies"] = table @@ -127,6 +125,10 @@ def main() -> int: with open(os.path.join(ROOT, "pyproject.toml"), "w") as f: f.write(tomlkit.dumps(pyproject)) + with open(os.path.join(ROOT, "nightly-requirements.generated.txt"), "w") as f: + f.write("# This file is generated by `./tools/update-optional-dependencies.py`\n# DO NOT EDIT\n-e .\n") + f.writelines([f"{v.to_str()}\n" for v in _NIGHTLY_MAPPING.values()]) + if shutil.which("taplo"): return os.system(f"taplo fmt {os.path.join(ROOT, 'pyproject.toml')}")