diff --git a/examples/bentoml-demo/bentofile.yaml b/examples/bentoml-demo/bentofile.yaml new file mode 100644 index 00000000..12902394 --- /dev/null +++ b/examples/bentoml-demo/bentofile.yaml @@ -0,0 +1,20 @@ +# Copyright 2023 BentoML Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +service: "service:svc" +include: +- "*.py" +python: + packages: + - openllm diff --git a/examples/bentoml-demo/service.py b/examples/bentoml-demo/service.py new file mode 100644 index 00000000..d25255ec --- /dev/null +++ b/examples/bentoml-demo/service.py @@ -0,0 +1,30 @@ +# Copyright 2023 BentoML Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
"""BentoML service exposing a dolly-v2 LLM behind a plain-text API."""

import bentoml
import openllm
from bentoml.io import Text

model = "dolly-v2"

# Build the default configuration for the chosen model, then wrap it in an
# OpenLLM runner that BentoML can schedule.
llm_config = openllm.AutoConfig.for_model(model)
llm_runner = openllm.Runner(model, llm_config=llm_config)

# `svc` is referenced by bentofile.yaml as "service:svc" — the name must not change.
svc = bentoml.Service(name="llm-service", runners=[llm_runner])


@svc.api(input=Text(), output=Text())
async def prompt(input_text: str) -> str:
    """Forward the raw prompt text to the LLM runner and return its completion."""
    return await llm_runner.generate(input_text)
+ +service: "service:svc" +include: +- "*.py" +python: + packages: + - openllm + - langchain + - pydantic diff --git a/examples/langchain-chains-demo/download_model.py b/examples/langchain-chains-demo/download_model.py new file mode 100644 index 00000000..3a2faba5 --- /dev/null +++ b/examples/langchain-chains-demo/download_model.py @@ -0,0 +1,13 @@ +# Copyright 2023 BentoML Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/examples/langchain-chains-demo/service.py b/examples/langchain-chains-demo/service.py new file mode 100644 index 00000000..45940775 --- /dev/null +++ b/examples/langchain-chains-demo/service.py @@ -0,0 +1,69 @@ +# Copyright 2023 BentoML Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
"""BentoML + LangChain service that generates Facebook ad copy with OpenLLM."""

from typing import Any, Dict

import bentoml
from bentoml.io import Text, JSON
from pydantic import BaseModel
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain
from langchain.llms import OpenLLM


class Query(BaseModel):
    """Request schema for the ad-copy endpoint."""

    # industry / product_name / keywords feed the prompt template below;
    # llm_config is included so clients can see the runner's configuration.
    industry: str
    product_name: str
    keywords: list[str]
    llm_config: Dict[str, Any]


llm = OpenLLM(
    model_name="dolly-v2",
    model_id="databricks/dolly-v2-7b",
    embedded=False,  # run against a separate OpenLLM server, not in-process
)
# Fix: the original template misspelled "persuading", "optimized",
# "captivating", and "persuasive" — typos in an LLM-facing prompt degrade
# output quality, so they are corrected here.
prompt = PromptTemplate(
    input_variables=["industry", "product_name", "keywords"],
    template="""
You are a Facebook Ads Copywriter with a strong background in persuasive
writing and marketing. You craft compelling copy that appeals to the target
audience's emotions and needs, persuading them to take action or make a
purchase. You are given the following context to create a facebook ad copy.
It should provide an attention-grabbing headline optimized for captivating
leads and persuasive calls to action.

Industry: {industry}
Product: {product_name}
Keywords: {keywords}
Facebook Ads copy:
    """,
)
chain = LLMChain(llm=llm, prompt=prompt)

# `svc` is referenced by bentofile.yaml as "service:svc" — the name must not change.
svc = bentoml.Service("fb-ads-copy", runners=[llm.runner])

# Example payload shown in the generated API docs / swagger sample.
SAMPLE_INPUT = Query(
    industry="SAAS",
    product_name="BentoML",
    keywords=["open source", "developer tool", "AI application platform", "serverless", "cost-efficient"],
    llm_config=llm.runner.config.model_dump(),
)


@svc.api(input=JSON.from_sample(sample=SAMPLE_INPUT), output=Text())
def generate(query: Query) -> str:
    """Run the LangChain chain on the structured query and return the ad copy."""
    return chain.run(
        {"industry": query.industry, "product_name": query.product_name, "keywords": ", ".join(query.keywords)}
    )
BENTOML_CONFIG_OPTIONS="api_server.traffic.timeout=900 runners.traffic.timeout=900" +bentoml serve +``` + +Build Bento: +```bash +bentoml build +``` + +Generate docker image: + +```bash +bentoml containerize ... +docker run \ + -e SERPAPI_API_KEY="__Your_SERP_API_key__" \ + -e BENTOML_CONFIG_OPTIONS="api_server.traffic.timeout=900 runners.traffic.timeout=900" \ + -p 3000:3000 \ + ..image_name + +``` + + + + + + diff --git a/examples/langchain-tools-demo/bentofile.yaml b/examples/langchain-tools-demo/bentofile.yaml new file mode 100644 index 00000000..5d995ea7 --- /dev/null +++ b/examples/langchain-tools-demo/bentofile.yaml @@ -0,0 +1,19 @@ +# Copyright 2023 BentoML Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +service: "service:svc" +include: +- "*.py" +python: + requirements_txt: "./requirements.txt" diff --git a/examples/langchain-tools-demo/bentoml_configuration.yaml b/examples/langchain-tools-demo/bentoml_configuration.yaml new file mode 100644 index 00000000..e1c2e537 --- /dev/null +++ b/examples/langchain-tools-demo/bentoml_configuration.yaml @@ -0,0 +1,22 @@ +# Copyright 2023 BentoML Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +runners: + llm-dolly-v2-runner: + resources: + nvidia.com/gpu: 2 + workers_per_resource: 0.5 + llm-stablelm-runner: + resources: + nvidia.com/gpu: 1 diff --git a/examples/langchain-tools-demo/download_model.py b/examples/langchain-tools-demo/download_model.py new file mode 100644 index 00000000..3a2faba5 --- /dev/null +++ b/examples/langchain-tools-demo/download_model.py @@ -0,0 +1,13 @@ +# Copyright 2023 BentoML Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/examples/langchain-tools-demo/requirements.txt b/examples/langchain-tools-demo/requirements.txt new file mode 100644 index 00000000..147c5ea2 --- /dev/null +++ b/examples/langchain-tools-demo/requirements.txt @@ -0,0 +1,3 @@ +openllm +langchain +google-search-results diff --git a/examples/langchain-tools-demo/service.py b/examples/langchain-tools-demo/service.py new file mode 100644 index 00000000..fa736d04 --- /dev/null +++ b/examples/langchain-tools-demo/service.py @@ -0,0 +1,35 @@ +# Copyright 2023 BentoML Team. All rights reserved. 
"""BentoML + LangChain agent service backed by an OpenLLM dolly-v2 runner."""

import bentoml
from bentoml.io import Text
from langchain.agents import AgentType, initialize_agent, load_tools
from langchain.llms import OpenLLM

# Example prompt shown in the generated API docs / swagger sample.
SAMPLE_INPUT = "What is the weather in San Francisco?"

# Fix: the original constructed OpenLLM twice — the first instance (which
# pinned model_id="databricks/dolly-v2-7b") was immediately clobbered by a
# second `llm = OpenLLM(model_name="dolly-v2", embedded=False)` assignment,
# silently discarding the explicit model_id. Keep a single definition with
# the fully specified configuration.
llm = OpenLLM(
    model_name="dolly-v2",
    model_id="databricks/dolly-v2-7b",
    embedded=False,  # run against a separate OpenLLM server, not in-process
)
# The "serpapi" tool requires SERPAPI_API_KEY in the environment (see README).
tools = load_tools(["serpapi"], llm=llm)
agent = initialize_agent(tools, llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION)

# `svc` is referenced by bentofile.yaml as "service:svc" — the name must not change.
svc = bentoml.Service("langchain-openllm", runners=[llm.runner])


@svc.api(input=Text.from_sample(SAMPLE_INPUT), output=Text())
def chat(input_text: str) -> str:
    """Run the zero-shot ReAct agent on the user's question and return its answer."""
    return agent.run(input_text)