From 9a6af9735693fcbca5af8eb6e5f9fa4cba271ea0 Mon Sep 17 00:00:00 2001 From: "Aaron Pham [bot]" <29749331+aarnphm@users.noreply.github.com> Date: Mon, 19 Jun 2023 17:27:52 +0000 Subject: [PATCH] infra: prepare for release 0.1.7 [generated] Signed-off-by: Aaron Pham [bot] <29749331+aarnphm@users.noreply.github.com> --- CHANGELOG.md | 34 ++++++++++++++++++++++++++++++++++ changelog.d/29.feature.md | 27 --------------------------- package.json | 2 +- src/openllm/__about__.py | 2 +- 4 files changed, 36 insertions(+), 29 deletions(-) delete mode 100644 changelog.d/29.feature.md diff --git a/CHANGELOG.md b/CHANGELOG.md index af5ad15e..7bd0a69a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -18,6 +18,40 @@ This changelog is managed by towncrier and is compiled at release time. +## [0.1.7](https://github.com/bentoml/openllm/tree/v0.1.7) + +### Features + +- OpenLLM now seamlessly integrates with HuggingFace Agents. + Replace the HfAgent endpoint with a running remote server. + + ```python + import transformers + + agent = transformers.HfAgent("http://localhost:3000/hf/agent") # URL that runs the OpenLLM server + + agent.run("Is the following `text` positive or negative?", text="I don't like how this model generates inputs") + ``` + + Note that only `starcoder` is currently supported for the agent feature.
+ + To use it from the `openllm.client`, do: + ```python + import openllm + + client = openllm.client.HTTPClient("http://123.23.21.1:3000") + + client.ask_agent( + task="Is the following `text` positive or negative?", + text="What are you thinking about?", + agent_type="hf", + ) + ``` + + Fixes an asyncio exception by increasing the timeout + [#29](https://github.com/bentoml/openllm/issues/29) + + ## [0.1.6](https://github.com/bentoml/openllm/tree/v0.1.6) ### Changes diff --git a/changelog.d/29.feature.md b/changelog.d/29.feature.md deleted file mode 100644 index 85522006..00000000 --- a/changelog.d/29.feature.md +++ /dev/null @@ -1,27 +0,0 @@ -OpenLLM now seamlessly integrates with HuggingFace Agents. -Replace the HfAgent endpoint with a running remote server. - -```python -import transformers - -agent = transformers.HfAgent("http://localhost:3000/hf/agent") # URL that runs the OpenLLM server - -agent.run("Is the following `text` positive or negative?", text="I don't like how this models is generate inputs") -``` - -Note that only `starcoder` is currently supported for agent feature. 
- -To use it from the `openllm.client`, do: -```python -import openllm - -client = openllm.client.HTTPClient("http://123.23.21.1:3000") - -client.ask_agent( - task="Is the following `text` positive or negative?", - text="What are you thinking about?", - agent_type="hf", -) -``` - -Fixes a Asyncio exception by increasing the timeout diff --git a/package.json b/package.json index 2e364689..2495e2b1 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "openllm", - "version": "0.1.7.dev0", + "version": "0.1.7", "description": "OpenLLM: Your one stop-and-go solution for serving Large Language Model", "repository": "git@github.com:llmsys/OpenLLM.git", "author": "Aaron Pham <29749331+aarnphm@users.noreply.github.com>", diff --git a/src/openllm/__about__.py b/src/openllm/__about__.py index 28f380b9..a1034897 100644 --- a/src/openllm/__about__.py +++ b/src/openllm/__about__.py @@ -11,4 +11,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "0.1.7.dev0" +__version__ = "0.1.7"