docs: add LangChain and BentoML Examples (#25)

Co-authored-by: aarnphm-ec2-dev <29749331+aarnphm@users.noreply.github.com>
This commit is contained in:
Chaoyu
2023-06-15 03:14:37 -07:00
committed by GitHub
parent 5e1445218b
commit dc50a2e7e5
12 changed files with 309 additions and 0 deletions

View File

@@ -0,0 +1,20 @@
# Copyright 2023 BentoML Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# BentoML build configuration: packages this directory into a bento.
service: "service:svc"  # entrypoint: the `svc` object defined in service.py
include:
# Ship every Python source in this directory with the bento.
- "*.py"
python:
packages:
# openllm pulls in bentoml and the model runtime dependencies.
- openllm

View File

@@ -0,0 +1,30 @@
# Copyright 2023 BentoML Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import bentoml
import openllm
from bentoml.io import Text
# Name of the model to serve; its default generation config is looked up below.
MODEL_NAME = "dolly-v2"

config = openllm.AutoConfig.for_model(MODEL_NAME)
runner = openllm.Runner(MODEL_NAME, llm_config=config)

# `svc` is the entrypoint referenced by bentofile.yaml ("service:svc").
svc = bentoml.Service(name="llm-service", runners=[runner])


@svc.api(input=Text(), output=Text())
async def prompt(input_text: str) -> str:
    """Generate and return a completion for the given prompt text."""
    return await runner.generate(input_text)

View File

@@ -0,0 +1,31 @@
# LangChain + BentoML + OpenLLM
Run it locally:
```bash
export BENTOML_CONFIG_OPTIONS="api_server.traffic.timeout=900 runners.traffic.timeout=900"
bentoml serve
```
Build Bento:
```bash
bentoml build
```
Generate docker image:
```bash
bentoml containerize <bento_tag>
docker run \
  -e SERPAPI_API_KEY="__Your_SERP_API_key__" \
  -e BENTOML_CONFIG_OPTIONS="api_server.traffic.timeout=900 runners.traffic.timeout=900" \
  -p 3000:3000 \
  <image_name>
```

View File

@@ -0,0 +1,22 @@
# Copyright 2023 BentoML Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# BentoML build configuration: packages this directory into a bento.
service: "service:svc"  # entrypoint: the `svc` object defined in service.py
include:
# Ship every Python source in this directory with the bento.
- "*.py"
python:
packages:
# openllm pulls in bentoml and the model runtime dependencies.
- openllm
- langchain
- pydantic

View File

@@ -0,0 +1,13 @@
# Copyright 2023 BentoML Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

View File

@@ -0,0 +1,69 @@
# Copyright 2023 BentoML Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Dict
import bentoml
from bentoml.io import Text, JSON
from pydantic import BaseModel
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain
from langchain.llms import OpenLLM
class Query(BaseModel):
    """Request payload for the ad-copy generation endpoint."""

    industry: str  # target industry, e.g. "SAAS"
    product_name: str  # product the ad copy is written for
    keywords: list[str]  # selling points to weave into the copy
    llm_config: Dict[str, Any]  # generation parameters (mirrors the runner's config)
# LangChain client for the OpenLLM runner; `embedded=False` runs the model in a
# separate runner process rather than inside this API server process.
llm = OpenLLM(
    model_name="dolly-v2",
    model_id="databricks/dolly-v2-7b",
    embedded=False,
)

# Prompt template that turns the structured Query fields into an ad-copy request.
# (Typos in the original prompt text — "peruading", "optimizied", "capivating",
# "perusaive" — are fixed here, since the model sees this text verbatim.)
prompt = PromptTemplate(
    input_variables=["industry", "product_name", "keywords"],
    template="""
You are a Facebook Ads Copywriter with a strong background in persuasive
writing and marketing. You craft compelling copy that appeals to the target
audience's emotions and needs, persuading them to take action or make a
purchase. You are given the following context to create a facebook ad copy.
It should provide an attention-grabbing headline optimized for captivating
leads and persuasive calls to action.
Industry: {industry}
Product: {product_name}
Keywords: {keywords}
Facebook Ads copy:
""",
)

chain = LLMChain(llm=llm, prompt=prompt)

# `svc` is the entrypoint referenced by bentofile.yaml ("service:svc").
svc = bentoml.Service("fb-ads-copy", runners=[llm.runner])

# Example payload surfaced in the generated OpenAPI docs.
SAMPLE_INPUT = Query(
    industry="SAAS",
    product_name="BentoML",
    keywords=["open source", "developer tool", "AI application platform", "serverless", "cost-efficient"],
    llm_config=llm.runner.config.model_dump(),
)
@svc.api(input=JSON.from_sample(sample=SAMPLE_INPUT), output=Text())
def generate(query: Query):
    """Render the prompt from *query* and return the generated ad copy."""
    payload = {
        "industry": query.industry,
        "product_name": query.product_name,
        "keywords": ", ".join(query.keywords),
    }
    return chain.run(payload)

View File

@@ -0,0 +1,32 @@
# LangChain + BentoML + OpenLLM
Run it locally:
```bash
export SERPAPI_API_KEY="__Your_SERP_API_key__"
export BENTOML_CONFIG_OPTIONS="api_server.traffic.timeout=900 runners.traffic.timeout=900"
bentoml serve
```
Build Bento:
```bash
bentoml build
```
Generate docker image:
```bash
bentoml containerize <bento_tag>
docker run \
  -e SERPAPI_API_KEY="__Your_SERP_API_key__" \
  -e BENTOML_CONFIG_OPTIONS="api_server.traffic.timeout=900 runners.traffic.timeout=900" \
  -p 3000:3000 \
  <image_name>
```

View File

@@ -0,0 +1,19 @@
# Copyright 2023 BentoML Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# BentoML build configuration: packages this directory into a bento.
service: "service:svc"  # entrypoint: the `svc` object defined in service.py
include:
# Ship every Python source in this directory with the bento.
- "*.py"
python:
# Python dependencies are pinned in the sibling requirements.txt.
requirements_txt: "./requirements.txt"

View File

@@ -0,0 +1,22 @@
# Copyright 2023 BentoML Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# BentoML runtime configuration: per-runner GPU resource allocation.
runners:
llm-dolly-v2-runner:
resources:
nvidia.com/gpu: 2
# 0.5 workers per GPU across 2 GPUs — presumably one worker spanning both
# devices; confirm against BentoML's workers_per_resource semantics.
workers_per_resource: 0.5
llm-stablelm-runner:
resources:
nvidia.com/gpu: 1

View File

@@ -0,0 +1,13 @@
# Copyright 2023 BentoML Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

View File

@@ -0,0 +1,3 @@
openllm
langchain
google-search-results

View File

@@ -0,0 +1,35 @@
# Copyright 2023 BentoML Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import bentoml
from bentoml.io import Text
from langchain.agents import AgentType, initialize_agent, load_tools
from langchain.llms import OpenLLM
# Example question surfaced in the generated OpenAPI docs.
SAMPLE_INPUT = "What is the weather in San Francisco?"

# LangChain client for the OpenLLM runner; `embedded=False` runs the model in a
# separate runner process rather than inside this API server process.
# NOTE: the original code assigned `llm` a second time with a bare
# OpenLLM(model_name="dolly-v2", embedded=False), silently discarding the
# explicit `model_id` below; that shadowing reassignment is removed.
llm = OpenLLM(
    model_name="dolly-v2",
    model_id="databricks/dolly-v2-7b",
    embedded=False,
)

# SerpAPI-backed search tool; requires SERPAPI_API_KEY in the environment.
tools = load_tools(["serpapi"], llm=llm)
agent = initialize_agent(tools, llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION)

# `svc` is the entrypoint referenced by bentofile.yaml ("service:svc").
svc = bentoml.Service("langchain-openllm", runners=[llm.runner])
@svc.api(input=Text.from_sample(SAMPLE_INPUT), output=Text())
def chat(input_text: str):
    """Run the ReAct agent on the user's question and return its answer."""
    response = agent.run(input_text)
    return response