Mirror of https://github.com/mudler/LocalAI.git (synced 2026-02-03 11:13:31 -05:00)

Compare commits (123 commits)

Commit SHA1s (author and date columns were not preserved):

9099d0c77e b69614c2b3 068b90a6dc 0586fe2d9c f1e03bf474 7f0093b2c9 e8431d62a2 adafd7cf23 6daef00d30 a0cdd19038
d454118887 356f23bacb 196c249367 e2a8dd64db 20a5b20b59 06d0d00231 62c7f745ca 551faa8ddb 2c041a2077 c4af769d4f
b425a870b0 b59e16742e 947224b952 20cd8814c1 ce8045f521 1bf5a11437 2daa5e6be0 b91aa288b5 43187d1aba 97b730e238
d11ed5287b 81ac490202 e53dd4a57b d274df2fe2 0b3a55b9fe abd5eea66d 65c3df392c 57908df956 26e522a558 817685e4c1
bcad3f3018 303370ad87 a9fb7174ba 6d6f50340f 6a136b2a4b 8f7045cfa6 61c964dce7 48d621c64e 661dbbf2b4 254f644c5f
88edb1e2af 640a3f1bfe b1243453f4 dfc651f643 d4978383ff cde0139363 3d4bb757d2 a4e749c22f 25a9685e2f 94d417c2b7
b897d47e0f 3422d21346 a7917a2150 7b23b894b4 15c083f731 293eaad69d 605126db8a 3980beabd7 11d3ce9edb 14cb620cd8
841dfefd62 d1cb2467fd a8e10f03e9 94010a0a44 75bc933dc4 8de0f21f7c 66b03b54cb 9ea8159683 c33083aeca eb34f838f8
8327e85e34 a8c08d83d0 e314cdcdde 4528e969c9 175ae751ba 43bfdc9561 546dce68a6 82db2fa425 a27af2d7ad 9f43f37150
3ad920b50a dbe7ac484c d9905ba050 dd2e243997 fd905b483b 9c5cd9b38b 07ce0a3c17 5be2d22117 e88468640f 81890e76a0
a91c2e7aaa 7748eb6553 835932e95e ae1ec4e096 c75ecfa009 8737a65760 418c582430 6fd0341eca ccc7cb0287 a1d6cc93a8
dc14d80f51 b8eb10b6b7 0f6b4513bf 6f0c936f74 42136b6f27 2810e3ea5c 11d34e38dc 06951cdd6b 103af480c7 db401b4d84
e0c876aae1 5e0847b3d7 ee5ca49bc1

.bruno/LocalAI Test Requests/model gallery/model delete.bru (new file, 11 lines)
@@ -0,0 +1,11 @@
+meta {
+  name: model delete
+  type: http
+  seq: 7
+}
+
+post {
+  url: {{PROTOCOL}}{{HOST}}:{{PORT}}/models/galleries
+  body: none
+  auth: none
+}

.bruno/LocalAI Test Requests/transcription/gb1.ogg (new binary file)
Binary file not shown.

.bruno/LocalAI Test Requests/transcription/transcribe.bru (new file, 16 lines)
@@ -0,0 +1,16 @@
+meta {
+  name: transcribe
+  type: http
+  seq: 1
+}
+
+post {
+  url: {{PROTOCOL}}{{HOST}}:{{PORT}}/v1/audio/transcriptions
+  body: multipartForm
+  auth: none
+}
+
+body:multipart-form {
+  file: @file(transcription/gb1.ogg)
+  model: whisper-1
+}

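For reference, the same transcription request can be issued outside Bruno. A minimal Go sketch, assuming a LocalAI instance at http://localhost:8080 (standing in for {{PROTOCOL}}{{HOST}}:{{PORT}}) and a local gb1.ogg; both are assumptions, not values from the diff:

package main

import (
	"bytes"
	"fmt"
	"io"
	"mime/multipart"
	"net/http"
	"os"
)

func main() {
	// Build a multipart body mirroring the Bruno request:
	// a "file" part with the audio and a "model" field.
	var buf bytes.Buffer
	w := multipart.NewWriter(&buf)

	f, err := os.Open("transcription/gb1.ogg")
	if err != nil {
		panic(err)
	}
	defer f.Close()

	part, _ := w.CreateFormFile("file", "gb1.ogg")
	io.Copy(part, f)
	w.WriteField("model", "whisper-1")
	w.Close()

	resp, err := http.Post("http://localhost:8080/v1/audio/transcriptions",
		w.FormDataContentType(), &buf)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Println(string(body))
}
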
.github/workflows/test-extra.yml (vendored, 7 changed lines)
@@ -123,6 +123,13 @@ jobs:
       run: |
         make --jobs=5 --output-sync=target -C backend/python/parler-tts
         make --jobs=5 --output-sync=target -C backend/python/parler-tts test
+    - name: Setup tmate session if tests fail
+      if: ${{ failure() }}
+      uses: mxschmitt/action-tmate@v3.19
+      with:
+        detached: true
+        connect-timeout-seconds: 180
+        limit-access-to-actor: true

   tests-openvoice:
     runs-on: ubuntu-latest

.github/workflows/test.yml (vendored, 2 changed lines)
@@ -224,7 +224,7 @@ jobs:
       - name: Dependencies
         run: |
           brew install protobuf grpc make protoc-gen-go protoc-gen-go-grpc libomp llvm
-          pip install --user --no-cache-dir grpcio-tools==1.64.1
+          pip install --user --no-cache-dir grpcio-tools
       - name: Test
         run: |
           export C_INCLUDE_PATH=/usr/local/include

Dockerfile
@@ -85,7 +85,8 @@ WORKDIR /build
 # The requirements-extras target is for any builds with IMAGE_TYPE=extras. It should not be placed in this target unless every IMAGE_TYPE=extras build will use it
 FROM requirements-core AS requirements-extras
 
-RUN curl -LsSf https://astral.sh/uv/install.sh | sh
+# Install uv as a system package
+RUN curl -LsSf https://astral.sh/uv/install.sh | UV_INSTALL_DIR=/usr/bin sh
 ENV PATH="/root/.cargo/bin:${PATH}"
 
 RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y

Makefile (6 changed lines)
@@ -8,7 +8,7 @@ DETECT_LIBS?=true
 # llama.cpp versions
 GOLLAMA_REPO?=https://github.com/go-skynet/go-llama.cpp
 GOLLAMA_VERSION?=2b57a8ae43e4699d3dc5d1496a1ccd42922993be
-CPPLLAMA_VERSION?=45f097645efb11b6d09a5b4adbbfd7c312ac0126
+CPPLLAMA_VERSION?=6423c65aa8be1b98f990cf207422505ac5a441a1
 
 # go-rwkv version
 RWKV_REPO?=https://github.com/donomii/go-rwkv.cpp
@@ -16,7 +16,7 @@ RWKV_VERSION?=661e7ae26d442f5cfebd2a0881b44e8c55949ec6
 
 # whisper.cpp version
 WHISPER_REPO?=https://github.com/ggerganov/whisper.cpp
-WHISPER_CPP_VERSION?=a5abfe6a90495f7bf19fe70d016ecc255e97359c
+WHISPER_CPP_VERSION?=31aea563a83803c710691fed3e8d700e06ae6788
 
 # bert.cpp version
 BERT_REPO?=https://github.com/go-skynet/go-bert.cpp
@@ -24,7 +24,7 @@ BERT_VERSION?=710044b124545415f555e4260d16b146c725a6e4
 
 # go-piper version
 PIPER_REPO?=https://github.com/mudler/go-piper
-PIPER_VERSION?=9d0100873a7dbb0824dfea40e8cec70a1b110759
+PIPER_VERSION?=e10ca041a885d4a8f3871d52924b47792d5e5aa0
 
 # stablediffusion version
 STABLEDIFFUSION_REPO?=https://github.com/mudler/go-stable-diffusion

README.md
@@ -40,7 +40,7 @@
 
 > :bulb: Get help - [❓FAQ](https://localai.io/faq/) [💭Discussions](https://github.com/go-skynet/LocalAI/discussions) [:speech_balloon: Discord](https://discord.gg/uJAeKSAGDy) [:book: Documentation website](https://localai.io/)
 >
-> [💻 Quickstart](https://localai.io/basics/getting_started/) [🖼️ Models](https://models.localai.io/) [🚀 Roadmap](https://github.com/mudler/LocalAI/issues?q=is%3Aissue+is%3Aopen+label%3Aroadmap) [🥽 Demo](https://demo.localai.io) [🌍 Explorer](https://explorer.localai.io) [🛫 Examples](https://github.com/go-skynet/LocalAI/tree/master/examples/)
+> [💻 Quickstart](https://localai.io/basics/getting_started/) [🖼️ Models](https://models.localai.io/) [🚀 Roadmap](https://github.com/mudler/LocalAI/issues?q=is%3Aissue+is%3Aopen+label%3Aroadmap) [🥽 Demo](https://demo.localai.io) [🌍 Explorer](https://explorer.localai.io) [🛫 Examples](https://github.com/mudler/LocalAI-examples)
 
 [](https://github.com/go-skynet/LocalAI/actions/workflows/test.yml)[](https://github.com/go-skynet/LocalAI/actions/workflows/release.yaml)[](https://github.com/go-skynet/LocalAI/actions/workflows/image.yml)[](https://github.com/go-skynet/LocalAI/actions/workflows/bump_deps.yaml)[](https://artifacthub.io/packages/search?repo=localai)
 
@@ -85,6 +85,7 @@ local-ai run oci://localai/phi-2:latest
 
 ## 📰 Latest project news
 
+- Oct 2024: examples moved to [LocalAI-examples](https://github.com/mudler/LocalAI-examples)
 - Aug 2024: 🆕 FLUX-1, [P2P Explorer](https://explorer.localai.io)
 - July 2024: 🔥🔥 🆕 P2P Dashboard, LocalAI Federated mode and AI Swarms: https://github.com/mudler/LocalAI/pull/2723
 - June 2024: 🆕 You can browse now the model gallery without LocalAI! Check out https://models.localai.io

backend/backend.proto
@@ -219,6 +219,7 @@ message ModelOptions {
   int32 SwapSpace = 53;
   int32 MaxModelLen = 54;
   int32 TensorParallelSize = 55;
+  string LoadFormat = 58;
 
   string MMProj = 41;
 
@@ -232,6 +233,11 @@ message ModelOptions {
 
   bool FlashAttention = 56;
   bool NoKVOffload = 57;
 
+  string ModelPath = 59;
+
+  repeated string LoraAdapters = 60;
+  repeated float LoraScales = 61;
 }
 
 message Result {

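The new repeated fields let one request carry several LoRA adapters with per-adapter weights. A minimal sketch of populating the generated Go struct (field names come from the diff above; the adapter file names are hypothetical):

// Relative adapter paths are resolved against ModelPath by the
// diffusers backend (see the Python changes further down).
opts := &pb.ModelOptions{
	ModelPath:    "/models",
	LoraAdapters: []string{"style.safetensors", "detail.safetensors"}, // hypothetical files
	LoraScales:   []float32{0.8, 0.5},                                 // one weight per adapter
}
_ = opts
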
backend/cpp/llama/grpc-server.cpp
@@ -670,7 +670,6 @@ struct llama_server_context
     slot->sparams.top_k = json_value(data, "top_k", default_sparams.top_k);
     slot->sparams.top_p = json_value(data, "top_p", default_sparams.top_p);
     slot->sparams.min_p = json_value(data, "min_p", default_sparams.min_p);
-    slot->sparams.tfs_z = json_value(data, "tfs_z", default_sparams.tfs_z);
     slot->sparams.typ_p = json_value(data, "typical_p", default_sparams.typ_p);
     slot->sparams.temp = json_value(data, "temperature", default_sparams.temp);
     slot->sparams.dynatemp_range = json_value(data, "dynatemp_range", default_sparams.dynatemp_range);
@@ -1206,7 +1205,6 @@ struct llama_server_context
     {"top_k", slot.sparams.top_k},
     {"top_p", slot.sparams.top_p},
     {"min_p", slot.sparams.min_p},
-    {"tfs_z", slot.sparams.tfs_z},
     {"typical_p", slot.sparams.typ_p},
     {"repeat_last_n", slot.sparams.penalty_last_n},
     {"repeat_penalty", slot.sparams.penalty_repeat},
@@ -2105,7 +2103,6 @@ json parse_options(bool streaming, const backend::PredictOptions* predict, llama
     // slot->params.n_predict = json_value(data, "n_predict", default_params.n_predict);
     // slot->sparams.top_k = json_value(data, "top_k", default_sparams.top_k);
     // slot->sparams.top_p = json_value(data, "top_p", default_sparams.top_p);
-    // slot->sparams.tfs_z = json_value(data, "tfs_z", default_sparams.tfs_z);
     // slot->sparams.typical_p = json_value(data, "typical_p", default_sparams.typical_p);
     // slot->sparams.temp = json_value(data, "temperature", default_sparams.temp);
     // slot->sparams.penalty_last_n = json_value(data, "repeat_last_n", default_sparams.penalty_last_n);
@@ -2129,7 +2126,6 @@ json parse_options(bool streaming, const backend::PredictOptions* predict, llama
     data["n_predict"] = predict->tokens() == 0 ? -1 : predict->tokens();
     data["top_k"] = predict->topk();
     data["top_p"] = predict->topp();
-    data["tfs_z"] = predict->tailfreesamplingz();
     data["typical_p"] = predict->typicalp();
     data["temperature"] = predict->temperature();
     data["repeat_last_n"] = predict->repeat();
@@ -2176,7 +2172,6 @@ json parse_options(bool streaming, const backend::PredictOptions* predict, llama
     // llama.params.n_predict = predict->tokens() == 0 ? -1 : predict->tokens();
     // llama.params.sparams.top_k = predict->topk();
     // llama.params.sparams.top_p = predict->topp();
-    // llama.params.sparams.tfs_z = predict->tailfreesamplingz();
     // llama.params.sparams.typical_p = predict->typicalp();
     // llama.params.sparams.penalty_last_n = predict->repeat();
     // llama.params.sparams.temp = predict->temperature();

@@ -1,6 +1,6 @@
 accelerate
 auto-gptq==0.7.1
-grpcio==1.67.0
+grpcio==1.67.1
 protobuf
 certifi
 transformers

@@ -1,4 +1,4 @@
 bark==0.1.5
-grpcio==1.67.0
+grpcio==1.67.1
 protobuf
 certifi

@@ -1,8 +1,9 @@
 .DEFAULT_GOAL := install
 
 .PHONY: install
-install: protogen
+install:
 	bash install.sh
+	$(MAKE) protogen
 
 .PHONY: protogen
 protogen: backend_pb2_grpc.py backend_pb2.py
@@ -12,7 +13,7 @@ protogen-clean:
 	$(RM) backend_pb2_grpc.py backend_pb2.py
 
 backend_pb2_grpc.py backend_pb2.py:
-	python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. backend.proto
+	bash protogen.sh
 
 .PHONY: clean
 clean: protogen-clean

backend/python/common/template/protogen.sh (new file, 6 lines)
@@ -0,0 +1,6 @@
+#!/bin/bash
+set -e
+
+source $(dirname $0)/../common/libbackend.sh
+
+python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. backend.proto

@@ -1,2 +1,3 @@
-grpcio==1.67.0
+grpcio==1.67.1
 protobuf
+grpcio-tools

@@ -1,4 +1,4 @@
-grpcio==1.67.0
+grpcio==1.67.1
 protobuf
 certifi
 packaging==24.1

backend/python/diffusers/backend.py
@@ -247,11 +247,16 @@ class BackendServicer(backend_pb2_grpc.BackendServicer):
                     use_safetensors=True,
                     variant=variant)
            elif request.PipelineType == "FluxPipeline":
+                if fromSingleFile:
+                    self.pipe = FluxPipeline.from_single_file(modelFile,
+                                                              torch_dtype=torchType,
+                                                              use_safetensors=True)
+                else:
                     self.pipe = FluxPipeline.from_pretrained(
                         request.Model,
                         torch_dtype=torch.bfloat16)
                 if request.LowVRAM:
                     self.pipe.enable_model_cpu_offload()
             elif request.PipelineType == "FluxTransformer2DModel":
                 dtype = torch.bfloat16
                 # specify from environment or default to "ChuckMcSneed/FLUX.1-dev"
@@ -296,22 +301,34 @@ class BackendServicer(backend_pb2_grpc.BackendServicer):
                 self.pipe.controlnet = self.controlnet
             else:
                 self.controlnet = None
-            # Assume directory from request.ModelFile.
-            # Only if request.LoraAdapter it's not an absolute path
-            if request.LoraAdapter and request.ModelFile != "" and not os.path.isabs(request.LoraAdapter) and request.LoraAdapter:
-                # get base path of modelFile
-                modelFileBase = os.path.dirname(request.ModelFile)
-                # modify LoraAdapter to be relative to modelFileBase
-                request.LoraAdapter = os.path.join(modelFileBase, request.LoraAdapter)
+
+            if request.LoraAdapter and not os.path.isabs(request.LoraAdapter):
+                request.LoraAdapter = os.path.join(request.ModelPath, request.LoraAdapter)
 
             device = "cpu" if not request.CUDA else "cuda"
             self.device = device
             if request.LoraAdapter:
                 # Check if its a local file and not a directory ( we load lora differently for a safetensor file )
                 if os.path.exists(request.LoraAdapter) and not os.path.isdir(request.LoraAdapter):
                     # self.load_lora_weights(request.LoraAdapter, 1, device, torchType)
                     self.pipe.load_lora_weights(request.LoraAdapter)
                 else:
                     self.pipe.unet.load_attn_procs(request.LoraAdapter)
+
+            if len(request.LoraAdapters) > 0:
+                i = 0
+                adapters_name = []
+                adapters_weights = []
+                for adapter in request.LoraAdapters:
+                    if not os.path.isabs(adapter):
+                        adapter = os.path.join(request.ModelPath, adapter)
+                    self.pipe.load_lora_weights(adapter, adapter_name=f"adapter_{i}")
+                    adapters_name.append(f"adapter_{i}")
+                    i += 1
+
+                for adapters_weight in request.LoraScales:
+                    adapters_weights.append(adapters_weight)
+
+                self.pipe.set_adapters(adapters_name, adapter_weights=adapters_weights)
 
             if request.CUDA:
                 self.pipe.to('cuda')
@@ -392,8 +409,6 @@ class BackendServicer(backend_pb2_grpc.BackendServicer):
         # create a dictionary of values for the parameters
         options = {
             "negative_prompt": request.negative_prompt,
-            "width": request.width,
-            "height": request.height,
             "num_inference_steps": steps,
         }
@@ -411,13 +426,13 @@ class BackendServicer(backend_pb2_grpc.BackendServicer):
         keys = options.keys()
 
         if request.EnableParameters != "":
-            keys = request.EnableParameters.split(",")
+            keys = [key.strip() for key in request.EnableParameters.split(",")]
 
         if request.EnableParameters == "none":
            keys = []
 
         # create a dictionary of parameters by using the keys from EnableParameters and the values from defaults
-        kwargs = {key: options[key] for key in keys}
+        kwargs = {key: options.get(key) for key in keys if key in options}
 
         # Set seed
         if request.seed > 0:
@@ -428,6 +443,12 @@ class BackendServicer(backend_pb2_grpc.BackendServicer):
         if self.PipelineType == "FluxPipeline":
             kwargs["max_sequence_length"] = 256
 
+        if request.width:
+            kwargs["width"] = request.width
+
+        if request.height:
+            kwargs["height"] = request.height
+
         if self.PipelineType == "FluxTransformer2DModel":
             kwargs["output_type"] = "pil"
             kwargs["generator"] = torch.Generator("cpu").manual_seed(0)
@@ -447,6 +468,7 @@ class BackendServicer(backend_pb2_grpc.BackendServicer):
             export_to_video(video_frames, request.dst)
             return backend_pb2.Result(message="Media generated successfully", success=True)
 
+        print(f"Generating image with {kwargs=}", file=sys.stderr)
         image = {}
         if COMPEL:
             conditioning, pooled = self.compel.build_conditioning_tensor(prompt)

@@ -1,5 +1,5 @@
 setuptools
-grpcio==1.67.0
+grpcio==1.67.1
 pillow
 protobuf
 certifi

@@ -1,4 +1,4 @@
-grpcio==1.67.0
+grpcio==1.67.1
 protobuf
 certifi
 wheel

@@ -1,3 +1,3 @@
-grpcio==1.67.0
+grpcio==1.67.1
 protobuf
 certifi

@@ -2,7 +2,7 @@
 intel-extension-for-pytorch
 torch
 optimum[openvino]
-grpcio==1.67.0
+grpcio==1.67.1
 protobuf
 librosa==0.9.1
 faster-whisper==0.9.0

@@ -1,4 +1,4 @@
-grpcio==1.67.0
+grpcio==1.67.1
 protobuf
 librosa
 faster-whisper

backend/python/parler-tts/Makefile
@@ -12,9 +12,10 @@ export SKIP_CONDA=1
 endif
 
 .PHONY: parler-tts
-parler-tts: protogen
+parler-tts:
 	@echo "Installing $(CONDA_ENV_PATH)..."
 	bash install.sh $(CONDA_ENV_PATH)
+	$(MAKE) protogen
 
 .PHONY: run
 run: protogen
@@ -36,7 +37,7 @@ protogen-clean:
 	$(RM) backend_pb2_grpc.py backend_pb2.py
 
 backend_pb2_grpc.py backend_pb2.py:
-	python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. backend.proto
+	bash protogen.sh
 
 .PHONY: clean
 clean: protogen-clean

backend/python/parler-tts/install.sh
@@ -11,8 +11,10 @@ if [ "x${BUILD_PROFILE}" == "xintel" ]; then
     EXTRA_PIP_INSTALL_FLAGS+=" --upgrade --index-strategy=unsafe-first-match"
 fi
 
+
 installRequirements
 
+
 # https://github.com/descriptinc/audiotools/issues/101
 # incompatible protobuf versions.
 PYDIR=python3.10

backend/python/parler-tts/protogen.sh (new executable file, 6 lines)
@@ -0,0 +1,6 @@
+#!/bin/bash
+set -e
+
+source $(dirname $0)/../common/libbackend.sh
+
+python3 -m grpc_tools.protoc -I../.. --python_out=. --grpc_python_out=. backend.proto

@@ -1,3 +1,4 @@
 git+https://github.com/huggingface/parler-tts.git@8e465f1b5fcd223478e07175cb40494d19ffbe17
 llvmlite==0.43.0
 numba==0.60.0
+grpcio-tools==1.42.0

@@ -1,4 +1,3 @@
-grpcio==1.67.0
-protobuf
+grpcio==1.67.1
 certifi
 llvmlite==0.43.0

@@ -1,3 +1,3 @@
-grpcio==1.67.0
+grpcio==1.67.1
 protobuf
 certifi

@@ -1,4 +1,4 @@
-grpcio==1.67.0
+grpcio==1.67.1
 protobuf
 certifi
 datasets

@@ -1,4 +1,4 @@
-grpcio==1.67.0
+grpcio==1.67.1
 protobuf
 scipy==1.14.0
 certifi

@@ -1,4 +1,4 @@
-grpcio==1.67.0
+grpcio==1.67.1
 protobuf
 certifi
 setuptools==69.5.1 # https://github.com/mudler/LocalAI/issues/2406

@@ -1,3 +1,3 @@
-grpcio==1.67.0
+grpcio==1.67.1
 protobuf
 certifi

backend/python/vllm/backend.py
@@ -95,6 +95,8 @@ class BackendServicer(backend_pb2_grpc.BackendServicer):
 
         if request.Quantization != "":
             engine_args.quantization = request.Quantization
+        if request.LoadFormat != "":
+            engine_args.load_format = request.LoadFormat
         if request.GPUMemoryUtilization != 0:
             engine_args.gpu_memory_utilization = request.GPUMemoryUtilization
         if request.TrustRemoteCode:

backend/python/vllm/install.sh
@@ -22,7 +22,7 @@ if [ "x${BUILD_TYPE}" == "x" ] && [ "x${FROM_SOURCE}" == "xtrue" ]; then
     git clone https://github.com/vllm-project/vllm
 fi
 pushd vllm
-uv pip install wheel packaging ninja "setuptools>=49.4.0" numpy typing-extensions pillow setuptools-scm grpcio==1.67.0 protobuf bitsandbytes
+uv pip install wheel packaging ninja "setuptools>=49.4.0" numpy typing-extensions pillow setuptools-scm grpcio==1.67.1 protobuf bitsandbytes
 uv pip install -v -r requirements-cpu.txt --extra-index-url https://download.pytorch.org/whl/cpu
 VLLM_TARGET_DEVICE=cpu python setup.py install
 popd

@@ -1,4 +1,4 @@
-grpcio==1.67.0
+grpcio==1.67.1
 protobuf
 certifi
 setuptools

core/backend/embeddings.go
@@ -11,17 +11,9 @@ import (
 
 func ModelEmbedding(s string, tokens []int, loader *model.ModelLoader, backendConfig config.BackendConfig, appConfig *config.ApplicationConfig) (func() ([]float32, error), error) {
-	var inferenceModel interface{}
-	var err error
-
-	opts := ModelOptions(backendConfig, appConfig, []model.Option{})
-
-	if backendConfig.Backend == "" {
-		inferenceModel, err = loader.GreedyLoader(opts...)
-	} else {
-		opts = append(opts, model.WithBackendString(backendConfig.Backend))
-		inferenceModel, err = loader.BackendLoader(opts...)
-	}
+	opts := ModelOptions(backendConfig, appConfig)
+	inferenceModel, err := loader.Load(opts...)
 	if err != nil {
 		return nil, err
 	}

core/backend/image.go
@@ -9,9 +9,8 @@ import (
 
 func ImageGeneration(height, width, mode, step, seed int, positive_prompt, negative_prompt, src, dst string, loader *model.ModelLoader, backendConfig config.BackendConfig, appConfig *config.ApplicationConfig) (func() error, error) {
-	opts := ModelOptions(backendConfig, appConfig, []model.Option{})
-
-	inferenceModel, err := loader.BackendLoader(
+	opts := ModelOptions(backendConfig, appConfig)
+	inferenceModel, err := loader.Load(
 		opts...,
 	)
 	if err != nil {

core/backend/llm.go
@@ -16,7 +16,6 @@ import (
 	"github.com/mudler/LocalAI/core/schema"
 
 	"github.com/mudler/LocalAI/core/gallery"
-	"github.com/mudler/LocalAI/pkg/grpc"
 	"github.com/mudler/LocalAI/pkg/grpc/proto"
 	model "github.com/mudler/LocalAI/pkg/model"
 	"github.com/mudler/LocalAI/pkg/utils"
@@ -35,15 +34,6 @@ type TokenUsage struct {
 func ModelInference(ctx context.Context, s string, messages []schema.Message, images, videos, audios []string, loader *model.ModelLoader, c config.BackendConfig, o *config.ApplicationConfig, tokenCallback func(string, TokenUsage) bool) (func() (LLMResponse, error), error) {
 	modelFile := c.Model
 
-	var inferenceModel grpc.Backend
-	var err error
-
-	opts := ModelOptions(c, o, []model.Option{})
-
-	if c.Backend != "" {
-		opts = append(opts, model.WithBackendString(c.Backend))
-	}
-
 	// Check if the modelFile exists, if it doesn't try to load it from the gallery
 	if o.AutoloadGalleries { // experimental
 		if _, err := os.Stat(modelFile); os.IsNotExist(err) {
@@ -56,12 +46,8 @@ func ModelInference(ctx context.Context, s string, messages []schema.Message, im
 		}
 	}
 
-	if c.Backend == "" {
-		inferenceModel, err = loader.GreedyLoader(opts...)
-	} else {
-		inferenceModel, err = loader.BackendLoader(opts...)
-	}
-
+	opts := ModelOptions(c, o)
+	inferenceModel, err := loader.Load(opts...)
 	if err != nil {
 		return nil, err
 	}

core/backend/options.go
@@ -11,7 +11,7 @@ import (
 	"github.com/rs/zerolog/log"
 )
 
-func ModelOptions(c config.BackendConfig, so *config.ApplicationConfig, opts []model.Option) []model.Option {
+func ModelOptions(c config.BackendConfig, so *config.ApplicationConfig, opts ...model.Option) []model.Option {
 	name := c.Name
 	if name == "" {
 		name = c.Model
@@ -125,6 +125,8 @@ func grpcModelOpts(c config.BackendConfig) *pb.ModelOptions {
 		CFGScale:             c.Diffusers.CFGScale,
 		LoraAdapter:          c.LoraAdapter,
 		LoraScale:            c.LoraScale,
+		LoraAdapters:         c.LoraAdapters,
+		LoraScales:           c.LoraScales,
 		F16Memory:            f16,
 		LoraBase:             c.LoraBase,
 		IMG2IMG:              c.Diffusers.IMG2IMG,
@@ -139,6 +141,7 @@ func grpcModelOpts(c config.BackendConfig) *pb.ModelOptions {
 		DraftModel:           c.DraftModel,
 		AudioPath:            c.VallE.AudioPath,
 		Quantization:         c.Quantization,
+		LoadFormat:           c.LoadFormat,
 		GPUMemoryUtilization: c.GPUMemoryUtilization,
 		TrustRemoteCode:      c.TrustRemoteCode,
 		EnforceEager:         c.EnforceEager,

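With the variadic signature, call sites no longer build a []model.Option slice by hand, and the loader entry point is consolidated into a single Load call. A before/after sketch of a typical caller (illustrative only; the real call sites are in the hunks that follow):

// Before: options passed as an explicit slice, loader chosen by caller.
opts := ModelOptions(backendConfig, appConfig, []model.Option{model.WithModel(modelFile)})
m, err := loader.BackendLoader(opts...)

// After: options passed variadically, one Load entry point.
opts = ModelOptions(backendConfig, appConfig, model.WithModel(modelFile))
m, err = loader.Load(opts...)
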
core/backend/rerank.go
@@ -11,8 +11,8 @@ import (
 
 func Rerank(modelFile string, request *proto.RerankRequest, loader *model.ModelLoader, appConfig *config.ApplicationConfig, backendConfig config.BackendConfig) (*proto.RerankResult, error) {
-	opts := ModelOptions(backendConfig, appConfig, []model.Option{model.WithModel(modelFile)})
-	rerankModel, err := loader.BackendLoader(opts...)
+	opts := ModelOptions(backendConfig, appConfig, model.WithModel(modelFile))
+	rerankModel, err := loader.Load(opts...)
 	if err != nil {
 		return nil, err
 	}

core/backend/soundgeneration.go
@@ -25,9 +25,8 @@ func SoundGeneration(
 	backendConfig config.BackendConfig,
 ) (string, *proto.Result, error) {
 
-	opts := ModelOptions(backendConfig, appConfig, []model.Option{model.WithModel(modelFile)})
-
-	soundGenModel, err := loader.BackendLoader(opts...)
+	opts := ModelOptions(backendConfig, appConfig, model.WithModel(modelFile))
+	soundGenModel, err := loader.Load(opts...)
 	if err != nil {
 		return "", nil, err
 	}

core/backend/stores.go
@@ -8,16 +8,15 @@ import (
 )
 
 func StoreBackend(sl *model.ModelLoader, appConfig *config.ApplicationConfig, storeName string) (grpc.Backend, error) {
 	if storeName == "" {
 		storeName = "default"
 	}
 
 	sc := []model.Option{
 		model.WithBackendString(model.LocalStoreBackend),
 		model.WithAssetDir(appConfig.AssetsDestination),
 		model.WithModel(storeName),
 	}
 
-	return sl.BackendLoader(sc...)
+	return sl.Load(sc...)
 }

core/backend/token_metrics.go
@@ -15,10 +15,8 @@ func TokenMetrics(
 	appConfig *config.ApplicationConfig,
 	backendConfig config.BackendConfig) (*proto.MetricsResponse, error) {
 
-	opts := ModelOptions(backendConfig, appConfig, []model.Option{
-		model.WithModel(modelFile),
-	})
-	model, err := loader.BackendLoader(opts...)
+	opts := ModelOptions(backendConfig, appConfig, model.WithModel(modelFile))
+	model, err := loader.Load(opts...)
 	if err != nil {
 		return nil, err
 	}

core/backend/tokenize.go
@@ -14,15 +14,13 @@ func ModelTokenize(s string, loader *model.ModelLoader, backendConfig config.Bac
 	var inferenceModel grpc.Backend
 	var err error
 
-	opts := ModelOptions(backendConfig, appConfig, []model.Option{
-		model.WithModel(modelFile),
-	})
+	opts := ModelOptions(backendConfig, appConfig, model.WithModel(modelFile))
 
 	if backendConfig.Backend == "" {
-		inferenceModel, err = loader.GreedyLoader(opts...)
+		inferenceModel, err = loader.Load(opts...)
 	} else {
 		opts = append(opts, model.WithBackendString(backendConfig.Backend))
-		inferenceModel, err = loader.BackendLoader(opts...)
+		inferenceModel, err = loader.Load(opts...)
 	}
 	if err != nil {
 		return schema.TokenizeResponse{}, err

core/backend/transcript.go
@@ -18,9 +18,9 @@ func ModelTranscription(audio, language string, translate bool, ml *model.ModelL
 		backendConfig.Backend = model.WhisperBackend
 	}
 
-	opts := ModelOptions(backendConfig, appConfig, []model.Option{})
+	opts := ModelOptions(backendConfig, appConfig)
 
-	transcriptionModel, err := ml.BackendLoader(opts...)
+	transcriptionModel, err := ml.Load(opts...)
 	if err != nil {
 		return nil, err
 	}

core/backend/tts.go
@@ -28,11 +28,8 @@ func ModelTTS(
 		bb = model.PiperBackend
 	}
 
-	opts := ModelOptions(config.BackendConfig{}, appConfig, []model.Option{
-		model.WithBackendString(bb),
-		model.WithModel(modelFile),
-	})
-	ttsModel, err := loader.BackendLoader(opts...)
+	opts := ModelOptions(backendConfig, appConfig, model.WithBackendString(bb), model.WithModel(modelFile))
+	ttsModel, err := loader.Load(opts...)
 	if err != nil {
 		return "", nil, err
 	}

core/cli/run.go
@@ -53,6 +53,7 @@ type RunCMD struct {
 	OpaqueErrors bool `env:"LOCALAI_OPAQUE_ERRORS" default:"false" help:"If true, all error responses are replaced with blank 500 errors. This is intended only for hardening against information leaks and is normally not recommended." group:"hardening"`
 	UseSubtleKeyComparison bool `env:"LOCALAI_SUBTLE_KEY_COMPARISON" default:"false" help:"If true, API Key validation comparisons will be performed using constant-time comparisons rather than simple equality. This trades off performance on each request for resiliancy against timing attacks." group:"hardening"`
 	DisableApiKeyRequirementForHttpGet bool `env:"LOCALAI_DISABLE_API_KEY_REQUIREMENT_FOR_HTTP_GET" default:"false" help:"If true, a valid API key is not required to issue GET requests to portions of the web ui. This should only be enabled in secure testing environments" group:"hardening"`
+	DisableMetricsEndpoint bool `env:"LOCALAI_DISABLE_METRICS_ENDPOINT,DISABLE_METRICS_ENDPOINT" default:"false" help:"Disable the /metrics endpoint" group:"api"`
 	HttpGetExemptedEndpoints []string `env:"LOCALAI_HTTP_GET_EXEMPTED_ENDPOINTS" default:"^/$,^/browse/?$,^/talk/?$,^/p2p/?$,^/chat/?$,^/text2image/?$,^/tts/?$,^/static/.*$,^/swagger.*$" help:"If LOCALAI_DISABLE_API_KEY_REQUIREMENT_FOR_HTTP_GET is overriden to true, this is the list of endpoints to exempt. Only adjust this in case of a security incident or as a result of a personal security posture review" group:"hardening"`
 	Peer2Peer bool `env:"LOCALAI_P2P,P2P" name:"p2p" default:"false" help:"Enable P2P mode" group:"p2p"`
 	Peer2PeerDHTInterval int `env:"LOCALAI_P2P_DHT_INTERVAL,P2P_DHT_INTERVAL" default:"360" name:"p2p-dht-interval" help:"Interval for DHT refresh (used during token generation)" group:"p2p"`
@@ -108,6 +109,10 @@ func (r *RunCMD) Run(ctx *cliContext.Context) error {
 		config.WithLoadToMemory(r.LoadToMemory),
 	}
 
+	if r.DisableMetricsEndpoint {
+		opts = append(opts, config.DisableMetricsEndpoint)
+	}
+
 	token := ""
 	if r.Peer2Peer || r.Peer2PeerToken != "" {
 		log.Info().Msg("P2P mode enabled")

core/config/application_config.go
@@ -39,6 +39,7 @@ type ApplicationConfig struct {
 	OpaqueErrors                       bool
 	UseSubtleKeyComparison             bool
 	DisableApiKeyRequirementForHttpGet bool
+	DisableMetrics                     bool
 	HttpGetExemptedEndpoints           []*regexp.Regexp
 	DisableGalleryEndpoint             bool
 	LoadToMemory                       []string
@@ -350,6 +351,10 @@ func WithDisableApiKeyRequirementForHttpGet(required bool) AppOption {
 	}
 }
 
+var DisableMetricsEndpoint AppOption = func(o *ApplicationConfig) {
+	o.DisableMetrics = true
+}
+
 func WithHttpGetExemptedEndpoints(endpoints []string) AppOption {
 	return func(o *ApplicationConfig) {
 		o.HttpGetExemptedEndpoints = []*regexp.Regexp{}

core/config/backend_config.go
@@ -134,22 +134,25 @@ type LLMConfig struct {
 	TrimSpace  []string `yaml:"trimspace"`
 	TrimSuffix []string `yaml:"trimsuffix"`
 
-	ContextSize          *int    `yaml:"context_size"`
-	NUMA                 bool    `yaml:"numa"`
-	LoraAdapter          string  `yaml:"lora_adapter"`
-	LoraBase             string  `yaml:"lora_base"`
-	LoraScale            float32 `yaml:"lora_scale"`
-	NoMulMatQ            bool    `yaml:"no_mulmatq"`
-	DraftModel           string  `yaml:"draft_model"`
-	NDraft               int32   `yaml:"n_draft"`
-	Quantization         string  `yaml:"quantization"`
-	GPUMemoryUtilization float32 `yaml:"gpu_memory_utilization"` // vLLM
-	TrustRemoteCode      bool    `yaml:"trust_remote_code"`      // vLLM
-	EnforceEager         bool    `yaml:"enforce_eager"`          // vLLM
-	SwapSpace            int     `yaml:"swap_space"`             // vLLM
-	MaxModelLen          int     `yaml:"max_model_len"`          // vLLM
-	TensorParallelSize   int     `yaml:"tensor_parallel_size"`   // vLLM
-	MMProj               string  `yaml:"mmproj"`
+	ContextSize          *int      `yaml:"context_size"`
+	NUMA                 bool      `yaml:"numa"`
+	LoraAdapter          string    `yaml:"lora_adapter"`
+	LoraBase             string    `yaml:"lora_base"`
+	LoraAdapters         []string  `yaml:"lora_adapters"`
+	LoraScales           []float32 `yaml:"lora_scales"`
+	LoraScale            float32   `yaml:"lora_scale"`
+	NoMulMatQ            bool      `yaml:"no_mulmatq"`
+	DraftModel           string    `yaml:"draft_model"`
+	NDraft               int32     `yaml:"n_draft"`
+	Quantization         string    `yaml:"quantization"`
+	LoadFormat           string    `yaml:"load_format"`
+	GPUMemoryUtilization float32   `yaml:"gpu_memory_utilization"` // vLLM
+	TrustRemoteCode      bool      `yaml:"trust_remote_code"`      // vLLM
+	EnforceEager         bool      `yaml:"enforce_eager"`          // vLLM
+	SwapSpace            int       `yaml:"swap_space"`             // vLLM
+	MaxModelLen          int       `yaml:"max_model_len"`          // vLLM
+	TensorParallelSize   int       `yaml:"tensor_parallel_size"`   // vLLM
+	MMProj               string    `yaml:"mmproj"`
 
 	FlashAttention bool `yaml:"flash_attention"`
 	NoKVOffloading bool `yaml:"no_kv_offloading"`
@@ -197,9 +200,7 @@ type TemplateConfig struct {
 	// It defaults to \n
 	JoinChatMessagesByCharacter *string `yaml:"join_chat_messages_by_character"`
 
-	Video string `yaml:"video"`
-	Image string `yaml:"image"`
-	Audio string `yaml:"audio"`
+	Multimodal string `yaml:"multimodal"`
 }
 
 func (c *BackendConfig) UnmarshalYAML(value *yaml.Node) error {

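The new lora_adapters and lora_scales keys can be exercised from a model YAML; a hedged sketch of such a fragment as a Go string constant (the adapter file names are made up, only the keys come from the yaml tags above):

// Hypothetical model-config fragment using the new fields; would be
// unmarshalled into LLMConfig via the yaml tags shown in the diff.
const loraFragment = `
lora_adapters:
  - style.safetensors
  - detail.safetensors
lora_scales:
  - 0.8
  - 0.5
`
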
core/http/app.go
@@ -109,19 +109,21 @@ func App(cl *config.BackendConfigLoader, ml *model.ModelLoader, appConfig *confi
 		app.Use(recover.New())
 	}
 
-	metricsService, err := services.NewLocalAIMetricsService()
-	if err != nil {
-		return nil, err
-	}
+	if !appConfig.DisableMetrics {
+		metricsService, err := services.NewLocalAIMetricsService()
+		if err != nil {
+			return nil, err
+		}
 
-	if metricsService != nil {
-		app.Use(localai.LocalAIMetricsAPIMiddleware(metricsService))
-		app.Hooks().OnShutdown(func() error {
-			return metricsService.Shutdown()
-		})
-	}
+		if metricsService != nil {
+			app.Use(localai.LocalAIMetricsAPIMiddleware(metricsService))
+			app.Hooks().OnShutdown(func() error {
+				return metricsService.Shutdown()
+			})
+		}
+	}
 
 	// Health Checks should always be exempt from auth, so register these first
 	routes.HealthRoutes(app)
 
 	kaConfig, err := middleware.GetKeyAuthConfig(appConfig)

core/http/app_test.go
@@ -438,7 +438,7 @@ var _ = Describe("API test", func() {
 			Eventually(func() bool {
 				response := getModelStatus("http://127.0.0.1:9090/models/jobs/" + uuid)
 				return response["processed"].(bool)
-			}, "360s", "10s").Should(Equal(true))
+			}, "900s", "10s").Should(Equal(true))
 
 			Eventually(func() []string {
 				models, _ := client.ListModels(context.TODO())
@@ -562,7 +562,7 @@ var _ = Describe("API test", func() {
 			Eventually(func() bool {
 				response := getModelStatus("http://127.0.0.1:9090/models/jobs/" + uuid)
 				return response["processed"].(bool)
-			}, "360s", "10s").Should(Equal(true))
+			}, "900s", "10s").Should(Equal(true))
 
 			By("testing chat")
 			resp, err := client.CreateChatCompletion(context.TODO(), openai.ChatCompletionRequest{Model: modelName, Messages: []openai.ChatCompletionMessage{

core/http/elements/buttons.go (new file, 97 lines)
@@ -0,0 +1,97 @@
+package elements
+
+import (
+	"strings"
+
+	"github.com/chasefleming/elem-go"
+	"github.com/chasefleming/elem-go/attrs"
+	"github.com/mudler/LocalAI/core/gallery"
+)
+
+func installButton(galleryName string) elem.Node {
+	return elem.Button(
+		attrs.Props{
+			"data-twe-ripple-init":  "",
+			"data-twe-ripple-color": "light",
+			"class":                 "float-right inline-block rounded bg-primary px-6 pb-2.5 mb-3 pt-2.5 text-xs font-medium uppercase leading-normal text-white shadow-primary-3 transition duration-150 ease-in-out hover:bg-primary-accent-300 hover:shadow-primary-2 focus:bg-primary-accent-300 focus:shadow-primary-2 focus:outline-none focus:ring-0 active:bg-primary-600 active:shadow-primary-2 dark:shadow-black/30 dark:hover:shadow-dark-strong dark:focus:shadow-dark-strong dark:active:shadow-dark-strong",
+			"hx-swap":               "outerHTML",
+			// post the Model ID as param
+			"hx-post": "/browse/install/model/" + galleryName,
+		},
+		elem.I(
+			attrs.Props{
+				"class": "fa-solid fa-download pr-2",
+			},
+		),
+		elem.Text("Install"),
+	)
+}
+
+func reInstallButton(galleryName string) elem.Node {
+	return elem.Button(
+		attrs.Props{
+			"data-twe-ripple-init":  "",
+			"data-twe-ripple-color": "light",
+			"class":                 "float-right inline-block rounded bg-primary ml-2 px-6 pb-2.5 mb-3 pt-2.5 text-xs font-medium uppercase leading-normal text-white shadow-primary-3 transition duration-150 ease-in-out hover:bg-primary-accent-300 hover:shadow-primary-2 focus:bg-primary-accent-300 focus:shadow-primary-2 focus:outline-none focus:ring-0 active:bg-primary-600 active:shadow-primary-2 dark:shadow-black/30 dark:hover:shadow-dark-strong dark:focus:shadow-dark-strong dark:active:shadow-dark-strong",
+			"hx-target":             "#action-div-" + dropBadChars(galleryName),
+			"hx-swap":               "outerHTML",
+			// post the Model ID as param
+			"hx-post": "/browse/install/model/" + galleryName,
+		},
+		elem.I(
+			attrs.Props{
+				"class": "fa-solid fa-arrow-rotate-right pr-2",
+			},
+		),
+		elem.Text("Reinstall"),
+	)
+}
+
+func infoButton(m *gallery.GalleryModel) elem.Node {
+	return elem.Button(
+		attrs.Props{
+			"data-twe-ripple-init":  "",
+			"data-twe-ripple-color": "light",
+			"class":                 "float-left inline-block rounded bg-primary px-6 pb-2.5 mb-3 pt-2.5 text-xs font-medium uppercase leading-normal text-white shadow-primary-3 transition duration-150 ease-in-out hover:bg-primary-accent-300 hover:shadow-primary-2 focus:bg-primary-accent-300 focus:shadow-primary-2 focus:outline-none focus:ring-0 active:bg-primary-600 active:shadow-primary-2 dark:shadow-black/30 dark:hover:shadow-dark-strong dark:focus:shadow-dark-strong dark:active:shadow-dark-strong",
+			"data-modal-target":     modalName(m),
+			"data-modal-toggle":     modalName(m),
+		},
+		elem.P(
+			attrs.Props{
+				"class": "flex items-center",
+			},
+			elem.I(
+				attrs.Props{
+					"class": "fas fa-info-circle pr-2",
+				},
+			),
+			elem.Text("Info"),
+		),
+	)
+}
+
+func deleteButton(galleryID string) elem.Node {
+	return elem.Button(
+		attrs.Props{
+			"data-twe-ripple-init":  "",
+			"data-twe-ripple-color": "light",
+			"hx-confirm":            "Are you sure you wish to delete the model?",
+			"class":                 "float-right inline-block rounded bg-red-800 px-6 pb-2.5 mb-3 pt-2.5 text-xs font-medium uppercase leading-normal text-white shadow-primary-3 transition duration-150 ease-in-out hover:bg-red-accent-300 hover:shadow-red-2 focus:bg-red-accent-300 focus:shadow-primary-2 focus:outline-none focus:ring-0 active:bg-red-600 active:shadow-primary-2 dark:shadow-black/30 dark:hover:shadow-dark-strong dark:focus:shadow-dark-strong dark:active:shadow-dark-strong",
+			"hx-target":             "#action-div-" + dropBadChars(galleryID),
+			"hx-swap":               "outerHTML",
+			// post the Model ID as param
+			"hx-post": "/browse/delete/model/" + galleryID,
+		},
+		elem.I(
+			attrs.Props{
+				"class": "fa-solid fa-cancel pr-2",
+			},
+		),
+		elem.Text("Delete"),
+	)
+}
+
+// Javascript/HTMX doesn't like weird IDs
+func dropBadChars(s string) string {
+	return strings.ReplaceAll(s, "@", "__")
+}

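A quick illustration of the ID sanitizer above, using a typical gallery ID of the form gallery@name (the concrete name is hypothetical):

// "@" breaks HTMX/JS element selectors, so it becomes "__":
id := "action-div-" + dropBadChars("localai@llama-3") // "action-div-localai__llama-3"
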
core/http/elements/gallery.go
@@ -2,13 +2,11 @@ package elements
 
 import (
 	"fmt"
-	"strings"
 
 	"github.com/chasefleming/elem-go"
 	"github.com/chasefleming/elem-go/attrs"
 	"github.com/microcosm-cc/bluemonday"
 	"github.com/mudler/LocalAI/core/gallery"
-	"github.com/mudler/LocalAI/core/p2p"
 	"github.com/mudler/LocalAI/core/services"
 )

@@ -16,231 +14,6 @@ const (
 	noImage = "https://upload.wikimedia.org/wikipedia/commons/6/65/No-Image-Placeholder.svg"
 )
 
-func renderElements(n []elem.Node) string {
-	render := ""
-	for _, r := range n {
-		render += r.Render()
-	}
-	return render
-}
-
-func DoneProgress(galleryID, text string, showDelete bool) string {
-	var modelName = galleryID
-	// Split by @ and grab the name
-	if strings.Contains(galleryID, "@") {
-		modelName = strings.Split(galleryID, "@")[1]
-	}
-
-	return elem.Div(
-		attrs.Props{
-			"id": "action-div-" + dropBadChars(galleryID),
-		},
-		elem.H3(
-			attrs.Props{
-				"role":      "status",
-				"id":        "pblabel",
-				"tabindex":  "-1",
-				"autofocus": "",
-			},
-			elem.Text(bluemonday.StrictPolicy().Sanitize(text)),
-		),
-		elem.If(showDelete, deleteButton(galleryID, modelName), reInstallButton(galleryID)),
-	).Render()
-}
-
-func ErrorProgress(err, galleryName string) string {
-	return elem.Div(
-		attrs.Props{},
-		elem.H3(
-			attrs.Props{
-				"role":      "status",
-				"id":        "pblabel",
-				"tabindex":  "-1",
-				"autofocus": "",
-			},
-			elem.Text("Error "+bluemonday.StrictPolicy().Sanitize(err)),
-		),
-		installButton(galleryName),
-	).Render()
-}
-
-func ProgressBar(progress string) string {
-	return elem.Div(attrs.Props{
-		"class":           "progress",
-		"role":            "progressbar",
-		"aria-valuemin":   "0",
-		"aria-valuemax":   "100",
-		"aria-valuenow":   "0",
-		"aria-labelledby": "pblabel",
-	},
-		elem.Div(attrs.Props{
-			"id":    "pb",
-			"class": "progress-bar",
-			"style": "width:" + progress + "%",
-		}),
-	).Render()
-}
-
-func P2PNodeStats(nodes []p2p.NodeData) string {
-	/*
-		<div class="bg-gray-800 p-6 rounded-lg shadow-lg text-left">
-			<p class="text-xl font-semibold text-gray-200">Total Workers Detected: {{ len .Nodes }}</p>
-			{{ $online := 0 }}
-			{{ range .Nodes }}
-				{{ if .IsOnline }}
-					{{ $online = add $online 1 }}
-				{{ end }}
-			{{ end }}
-			<p class="text-xl font-semibold text-gray-200">Total Online Workers: {{$online}}</p>
-		</div>
-	*/
-
-	online := 0
-	for _, n := range nodes {
-		if n.IsOnline() {
-			online++
-		}
-	}
-
-	class := "text-green-500"
-	if online == 0 {
-		class = "text-red-500"
-	}
-	/*
-		<i class="fas fa-circle animate-pulse text-green-500 ml-2 mr-1"></i>
-	*/
-	circle := elem.I(attrs.Props{
-		"class": "fas fa-circle animate-pulse " + class + " ml-2 mr-1",
-	})
-	nodesElements := []elem.Node{
-		elem.Span(
-			attrs.Props{
-				"class": class,
-			},
-			circle,
-			elem.Text(fmt.Sprintf("%d", online)),
-		),
-		elem.Span(
-			attrs.Props{
-				"class": "text-gray-200",
-			},
-			elem.Text(fmt.Sprintf("/%d", len(nodes))),
-		),
-	}
-
-	return renderElements(nodesElements)
-}
-
-func P2PNodeBoxes(nodes []p2p.NodeData) string {
-	/*
-		<div class="bg-gray-800 p-4 rounded-lg shadow-lg text-left">
-			<div class="flex items-center mb-2">
-				<i class="fas fa-desktop text-gray-400 mr-2"></i>
-				<span class="text-gray-200 font-semibold">{{.ID}}</span>
-			</div>
-			<p class="text-sm text-gray-400 mt-2 flex items-center">
-				Status:
-				<i class="fas fa-circle {{ if .IsOnline }}text-green-500{{ else }}text-red-500{{ end }} ml-2 mr-1"></i>
-				<span class="{{ if .IsOnline }}text-green-400{{ else }}text-red-400{{ end }}">
-					{{ if .IsOnline }}Online{{ else }}Offline{{ end }}
-				</span>
-			</p>
-		</div>
-	*/
-
-	nodesElements := []elem.Node{}
-
-	for _, n := range nodes {
-		nodesElements = append(nodesElements,
-			elem.Div(
-				attrs.Props{
-					"class": "bg-gray-700 p-6 rounded-lg shadow-lg text-left",
-				},
-				elem.P(
-					attrs.Props{
-						"class": "text-sm text-gray-400 mt-2 flex",
-					},
-					elem.I(
-						attrs.Props{
-							"class": "fas fa-desktop text-gray-400 mr-2",
-						},
-					),
-					elem.Text("Name: "),
-					elem.Span(
-						attrs.Props{
-							"class": "text-gray-200 font-semibold ml-2 mr-1",
-						},
-						elem.Text(bluemonday.StrictPolicy().Sanitize(n.ID)),
-					),
-					elem.Text("Status: "),
-					elem.If(
-						n.IsOnline(),
-						elem.I(
-							attrs.Props{
-								"class": "fas fa-circle animate-pulse text-green-500 ml-2 mr-1",
-							},
-						),
-						elem.I(
-							attrs.Props{
-								"class": "fas fa-circle animate-pulse text-red-500 ml-2 mr-1",
-							},
-						),
-					),
-					elem.If(
-						n.IsOnline(),
-						elem.Span(
-							attrs.Props{
-								"class": "text-green-400",
-							},
-							elem.Text("Online"),
-						),
-						elem.Span(
-							attrs.Props{
-								"class": "text-red-400",
-							},
-							elem.Text("Offline"),
-						),
-					),
-				),
-			))
-	}
-
-	return renderElements(nodesElements)
-}
-
-func StartProgressBar(uid, progress, text string) string {
-	if progress == "" {
-		progress = "0"
-	}
-	return elem.Div(
-		attrs.Props{
-			"hx-trigger": "done",
-			"hx-get":     "/browse/job/" + uid,
-			"hx-swap":    "outerHTML",
-			"hx-target":  "this",
-		},
-		elem.H3(
-			attrs.Props{
-				"role":      "status",
-				"id":        "pblabel",
-				"tabindex":  "-1",
-				"autofocus": "",
-			},
-			elem.Text(bluemonday.StrictPolicy().Sanitize(text)), //Perhaps overly defensive
-			elem.Div(attrs.Props{
-				"hx-get":     "/browse/job/progress/" + uid,
-				"hx-trigger": "every 600ms",
-				"hx-target":  "this",
-				"hx-swap":    "innerHTML",
-			},
-				elem.Raw(ProgressBar(progress)),
-			),
-		),
-	).Render()
-}
-
 func cardSpan(text, icon string) elem.Node {
 	return elem.Span(
 		attrs.Props{
@@ -268,7 +41,6 @@ func searchableElement(text, icon string) elem.Node {
 		attrs.Props{
 			"class": "inline-block bg-gray-200 rounded-full px-3 py-1 text-sm font-semibold text-gray-700 mr-2 mb-2 hover:bg-gray-300 hover:shadow-gray-2",
 		},
-
 		elem.A(
 			attrs.Props{
 				// "name": "search",
@@ -290,7 +62,8 @@ func searchableElement(text, icon string) elem.Node {
 	)
 }
 
-func link(text, url string) elem.Node {
+/*
+func buttonLink(text, url string) elem.Node {
 	return elem.A(
 		attrs.Props{
 			"class": "inline-block bg-gray-200 rounded-full px-3 py-1 text-sm font-semibold text-gray-700 mr-2 mb-2 hover:bg-gray-300 hover:shadow-gray-2",
@@ -303,163 +76,255 @@ func link(text, url string) elem.Node {
 		elem.Text(bluemonday.StrictPolicy().Sanitize(text)),
 	)
 }
 func installButton(galleryName string) elem.Node {
 	return elem.Button(
 		attrs.Props{
 			"data-twe-ripple-init":  "",
 			"data-twe-ripple-color": "light",
 			"class":                 "float-right inline-block rounded bg-primary px-6 pb-2.5 mb-3 pt-2.5 text-xs font-medium uppercase leading-normal text-white shadow-primary-3 transition duration-150 ease-in-out hover:bg-primary-accent-300 hover:shadow-primary-2 focus:bg-primary-accent-300 focus:shadow-primary-2 focus:outline-none focus:ring-0 active:bg-primary-600 active:shadow-primary-2 dark:shadow-black/30 dark:hover:shadow-dark-strong dark:focus:shadow-dark-strong dark:active:shadow-dark-strong",
 			"hx-swap":               "outerHTML",
 			// post the Model ID as param
 			"hx-post": "/browse/install/model/" + galleryName,
 		},
 		elem.I(
 			attrs.Props{
 				"class": "fa-solid fa-download pr-2",
 			},
 		),
 		elem.Text("Install"),
 	)
 }
+*/
 
-func reInstallButton(galleryName string) elem.Node {
-	return elem.Button(
+func link(text, url string) elem.Node {
+	return elem.A(
 		attrs.Props{
-			"data-twe-ripple-init":  "",
-			"data-twe-ripple-color": "light",
-			"class":                 "float-right inline-block rounded bg-primary ml-2 px-6 pb-2.5 mb-3 pt-2.5 text-xs font-medium uppercase leading-normal text-white shadow-primary-3 transition duration-150 ease-in-out hover:bg-primary-accent-300 hover:shadow-primary-2 focus:bg-primary-accent-300 focus:shadow-primary-2 focus:outline-none focus:ring-0 active:bg-primary-600 active:shadow-primary-2 dark:shadow-black/30 dark:hover:shadow-dark-strong dark:focus:shadow-dark-strong dark:active:shadow-dark-strong",
-			"hx-target":             "#action-div-" + dropBadChars(galleryName),
-			"hx-swap":               "outerHTML",
-			// post the Model ID as param
-			"hx-post": "/browse/install/model/" + galleryName,
+			"class":  "text-base leading-relaxed text-gray-500 dark:text-gray-400",
+			"href":   url,
+			"target": "_blank",
 		},
-		elem.I(
-			attrs.Props{
-				"class": "fa-solid fa-arrow-rotate-right pr-2",
-			},
-		),
-		elem.Text("Reinstall"),
+		elem.I(attrs.Props{
+			"class": "fas fa-link pr-2",
+		}),
+		elem.Text(bluemonday.StrictPolicy().Sanitize(text)),
 	)
 }
 
-func deleteButton(galleryID, modelName string) elem.Node {
-	return elem.Button(
-		attrs.Props{
-			"data-twe-ripple-init":  "",
-			"data-twe-ripple-color": "light",
-			"hx-confirm":            "Are you sure you wish to delete the model?",
-			"class":                 "float-right inline-block rounded bg-red-800 px-6 pb-2.5 mb-3 pt-2.5 text-xs font-medium uppercase leading-normal text-white shadow-primary-3 transition duration-150 ease-in-out hover:bg-red-accent-300 hover:shadow-red-2 focus:bg-red-accent-300 focus:shadow-primary-2 focus:outline-none focus:ring-0 active:bg-red-600 active:shadow-primary-2 dark:shadow-black/30 dark:hover:shadow-dark-strong dark:focus:shadow-dark-strong dark:active:shadow-dark-strong",
-			"hx-target":             "#action-div-" + dropBadChars(galleryID),
-			"hx-swap":               "outerHTML",
-			// post the Model ID as param
-			"hx-post": "/browse/delete/model/" + galleryID,
-		},
-		elem.I(
-			attrs.Props{
-				"class": "fa-solid fa-cancel pr-2",
-			},
-		),
-		elem.Text("Delete"),
-	)
-}
-
-// Javascript/HTMX doesn't like weird IDs
-func dropBadChars(s string) string {
-	return strings.ReplaceAll(s, "@", "__")
-}
-
 type ProcessTracker interface {
 	Exists(string) bool
 	Get(string) string
 }
 
-func ListModels(models []*gallery.GalleryModel, processTracker ProcessTracker, galleryService *services.GalleryService) string {
-	modelsElements := []elem.Node{}
-	descriptionDiv := func(m *gallery.GalleryModel) elem.Node {
-		return elem.Div(
-			attrs.Props{
-				"class": "p-6 text-surface dark:text-white",
-			},
-			elem.H5(
-				attrs.Props{
-					"class": "mb-2 text-xl font-bold leading-tight",
-				},
-				elem.Text(bluemonday.StrictPolicy().Sanitize(m.Name)),
-			),
-			elem.P(
-				attrs.Props{
-					"class": "mb-4 text-sm [&:not(:hover)]:truncate text-base",
-				},
-				elem.Text(bluemonday.StrictPolicy().Sanitize(m.Description)),
-			),
+func modalName(m *gallery.GalleryModel) string {
+	return m.Name + "-modal"
+}
+
+func modelDescription(m *gallery.GalleryModel) elem.Node {
+	urls := []elem.Node{}
+	for _, url := range m.URLs {
+		urls = append(urls,
+			elem.Li(attrs.Props{}, link(url, url)),
 		)
 	}
 
-	actionDiv := func(m *gallery.GalleryModel) elem.Node {
-		galleryID := fmt.Sprintf("%s@%s", m.Gallery.Name, m.Name)
-		currentlyProcessing := processTracker.Exists(galleryID)
-		jobID := ""
-		isDeletionOp := false
-		if currentlyProcessing {
-			status := galleryService.GetStatus(galleryID)
-			if status != nil && status.Deletion {
-				isDeletionOp = true
-			}
-			jobID = processTracker.Get(galleryID)
-			// TODO:
-			// case not handled, if status == nil : "Waiting"
-		}
-
-		nodes := []elem.Node{
-			cardSpan("Repository: "+m.Gallery.Name, "fa-brands fa-git-alt"),
-		}
-
-		if m.License != "" {
-			nodes = append(nodes,
-				cardSpan("License: "+m.License, "fas fa-book"),
-			)
-		}
-
-		tagsNodes := []elem.Node{}
-		for _, tag := range m.Tags {
-			tagsNodes = append(tagsNodes,
-				searchableElement(tag, "fas fa-tag"),
-			)
-		}
-
-		nodes = append(nodes,
-			elem.Div(
-				attrs.Props{
-					"class": "flex flex-row flex-wrap content-center",
-				},
-				tagsNodes...,
-			),
+	tagsNodes := []elem.Node{}
+	for _, tag := range m.Tags {
+		tagsNodes = append(tagsNodes,
+			searchableElement(tag, "fas fa-tag"),
 		)
+	}
 
-		for i, url := range m.URLs {
-			nodes = append(nodes,
-				link("Link #"+fmt.Sprintf("%d", i+1), url),
-			)
-		}
-
-		progressMessage := "Installation"
-		if isDeletionOp {
-			progressMessage = "Deletion"
-		}
-
-		return elem.Div(
+	return elem.Div(
 		attrs.Props{
 			"class": "p-6 text-surface dark:text-white",
 		},
 		elem.H5(
 			attrs.Props{
-				"class": "px-6 pt-4 pb-2",
+				"class": "mb-2 text-xl font-bold leading-tight",
 			},
 			elem.Text(bluemonday.StrictPolicy().Sanitize(m.Name)),
 		),
+		elem.Div( // small description
+			attrs.Props{
+				"class": "mb-4 text-sm truncate text-base",
+			},
+			elem.Text(bluemonday.StrictPolicy().Sanitize(m.Description)),
+		),
+
+		elem.Div(
+			attrs.Props{
+				"id":          modalName(m),
+				"tabindex":    "-1",
+				"aria-hidden": "true",
+				"class":       "hidden overflow-y-auto overflow-x-hidden fixed top-0 right-0 left-0 z-50 justify-center items-center w-full md:inset-0 h-[calc(100%-1rem)] max-h-full",
+			},
-		elem.P(
-			attrs.Props{
-				"class": "mb-4 text-base",
-			},
-			nodes...,
-		),
-		elem.Div(
-			attrs.Props{
-				"id": "action-div-" + dropBadChars(galleryID),
-			},
+			elem.Div(
+				attrs.Props{
+					"class": "relative p-4 w-full max-w-2xl max-h-full",
+				},
+				elem.Div(
+					attrs.Props{
+						"class": "relative p-4 w-full max-w-2xl max-h-full bg-white rounded-lg shadow dark:bg-gray-700",
+					},
+					// header
+					elem.Div(
+						attrs.Props{
+							"class": "flex items-center justify-between p-4 md:p-5 border-b rounded-t dark:border-gray-600",
+						},
+						elem.H3(
+							attrs.Props{
+								"class": "text-xl font-semibold text-gray-900 dark:text-white",
+							},
+							elem.Text(bluemonday.StrictPolicy().Sanitize(m.Name)),
+						),
+						elem.Button( // close button
+							attrs.Props{
+								"class":           "text-gray-400 bg-transparent hover:bg-gray-200 hover:text-gray-900 rounded-lg text-sm w-8 h-8 ms-auto inline-flex justify-center items-center dark:hover:bg-gray-600 dark:hover:text-white",
+								"data-modal-hide": modalName(m),
+							},
+							elem.Raw(
+								`<svg class="w-3 h-3" aria-hidden="true" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 14 14">
+									<path stroke="currentColor" stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="m1 1 6 6m0 0 6 6M7 7l6-6M7 7l-6 6"/>
+								</svg>`,
+							),
+							elem.Span(
+								attrs.Props{
+									"class": "sr-only",
+								},
+								elem.Text("Close modal"),
+							),
+						),
+					),
+					// body
+					elem.Div(
+						attrs.Props{
+							"class": "p-4 md:p-5 space-y-4",
+						},
+						elem.Div(
+							attrs.Props{
+								"class": "flex justify-center items-center",
+							},
+							elem.Img(attrs.Props{
+								// "class": "rounded-t-lg object-fit object-center h-96",
+								"class":   "lazy rounded-t-lg max-h-48 max-w-96 object-cover mt-3 entered loaded",
+								"src":     m.Icon,
+								"loading": "lazy",
+							}),
+						),
+						elem.P(
+							attrs.Props{
+								"class": "text-base leading-relaxed text-gray-500 dark:text-gray-400",
+							},
+							elem.Text(bluemonday.StrictPolicy().Sanitize(m.Description)),
+						),
+						elem.Hr(
+							attrs.Props{},
+						),
+						elem.P(
+							attrs.Props{
+								"class": "text-sm font-semibold text-gray-900 dark:text-white",
+							},
+							elem.Text("Links"),
+						),
+						elem.Ul(
+							attrs.Props{},
+							urls...,
+						),
+						elem.If(
+							len(m.Tags) > 0,
+							elem.Div(
+								attrs.Props{},
+								elem.P(
+									attrs.Props{
+										"class": "text-sm mb-5 font-semibold text-gray-900 dark:text-white",
+									},
+									elem.Text("Tags"),
+								),
+								elem.Div(
+									attrs.Props{
+										"class": "flex flex-row flex-wrap content-center",
+									},
+									tagsNodes...,
+								),
+							),
+							elem.Div(attrs.Props{}),
+						),
+					),
+					// Footer
+					elem.Div(
+						attrs.Props{
+							"class": "flex items-center p-4 md:p-5 border-t border-gray-200 rounded-b dark:border-gray-600",
+						},
+						elem.Button(
+							attrs.Props{
+								"data-modal-hide": modalName(m),
+								"class":           "py-2.5 px-5 ms-3 text-sm font-medium text-gray-900 focus:outline-none bg-white rounded-lg border border-gray-200 hover:bg-gray-100 hover:text-blue-700 focus:z-10 focus:ring-4 focus:ring-gray-100 dark:focus:ring-gray-700 dark:bg-gray-800 dark:text-gray-400 dark:border-gray-600 dark:hover:text-white dark:hover:bg-gray-700",
+							},
+							elem.Text("Close"),
+						),
+					),
+				),
+			),
+		),
+	)
+}
+
+func modelActionItems(m *gallery.GalleryModel, processTracker ProcessTracker, galleryService *services.GalleryService) elem.Node {
+	galleryID := fmt.Sprintf("%s@%s", m.Gallery.Name, m.Name)
+	currentlyProcessing := processTracker.Exists(galleryID)
|
||||
jobID := ""
|
||||
isDeletionOp := false
|
||||
if currentlyProcessing {
|
||||
status := galleryService.GetStatus(galleryID)
|
||||
if status != nil && status.Deletion {
|
||||
isDeletionOp = true
|
||||
}
|
||||
jobID = processTracker.Get(galleryID)
|
||||
// TODO:
|
||||
// case not handled, if status == nil : "Waiting"
|
||||
}
|
||||
|
||||
nodes := []elem.Node{
|
||||
cardSpan("Repository: "+m.Gallery.Name, "fa-brands fa-git-alt"),
|
||||
}
|
||||
|
||||
if m.License != "" {
|
||||
nodes = append(nodes,
|
||||
cardSpan("License: "+m.License, "fas fa-book"),
|
||||
)
|
||||
}
|
||||
/*
|
||||
tagsNodes := []elem.Node{}
|
||||
|
||||
for _, tag := range m.Tags {
|
||||
tagsNodes = append(tagsNodes,
|
||||
searchableElement(tag, "fas fa-tag"),
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
nodes = append(nodes,
|
||||
elem.Div(
|
||||
attrs.Props{
|
||||
"class": "flex flex-row flex-wrap content-center",
|
||||
},
|
||||
tagsNodes...,
|
||||
),
|
||||
)
|
||||
|
||||
for i, url := range m.URLs {
|
||||
nodes = append(nodes,
|
||||
buttonLink("Link #"+fmt.Sprintf("%d", i+1), url),
|
||||
)
|
||||
}
|
||||
*/
|
||||
|
||||
progressMessage := "Installation"
|
||||
if isDeletionOp {
|
||||
progressMessage = "Deletion"
|
||||
}
|
||||
|
||||
return elem.Div(
|
||||
attrs.Props{
|
||||
"class": "px-6 pt-4 pb-2",
|
||||
},
|
||||
elem.P(
|
||||
attrs.Props{
|
||||
"class": "mb-4 text-base",
|
||||
},
|
||||
nodes...,
|
||||
),
|
||||
elem.Div(
|
||||
attrs.Props{
|
||||
"id": "action-div-" + dropBadChars(galleryID),
|
||||
"class": "flow-root", // To order buttons left and right
|
||||
},
|
||||
infoButton(m),
|
||||
elem.Div(
|
||||
attrs.Props{
|
||||
"class": "float-right",
|
||||
},
|
||||
elem.If(
|
||||
currentlyProcessing,
|
||||
@@ -470,14 +335,18 @@ func ListModels(models []*gallery.GalleryModel, processTracker ProcessTracker, g
|
||||
elem.Node(elem.Div(
|
||||
attrs.Props{},
|
||||
reInstallButton(m.ID()),
|
||||
deleteButton(m.ID(), m.Name),
|
||||
deleteButton(m.ID()),
|
||||
)),
|
||||
installButton(m.ID()),
|
||||
),
|
||||
),
|
||||
),
|
||||
)
|
||||
}
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
func ListModels(models []*gallery.GalleryModel, processTracker ProcessTracker, galleryService *services.GalleryService) string {
|
||||
modelsElements := []elem.Node{}
|
||||
|
||||
for _, m := range models {
|
||||
elems := []elem.Node{}
|
||||
@@ -521,7 +390,10 @@ func ListModels(models []*gallery.GalleryModel, processTracker ProcessTracker, g
|
||||
))
|
||||
}
|
||||
|
||||
elems = append(elems, descriptionDiv(m), actionDiv(m))
|
||||
elems = append(elems,
|
||||
modelDescription(m),
|
||||
modelActionItems(m, processTracker, galleryService),
|
||||
)
|
||||
modelsElements = append(modelsElements,
|
||||
elem.Div(
|
||||
attrs.Props{
|
||||
|
||||
147
core/http/elements/p2p.go
Normal file
@@ -0,0 +1,147 @@
package elements

import (
"fmt"

"github.com/chasefleming/elem-go"
"github.com/chasefleming/elem-go/attrs"
"github.com/microcosm-cc/bluemonday"
"github.com/mudler/LocalAI/core/p2p"
)

func renderElements(n []elem.Node) string {
render := ""
for _, r := range n {
render += r.Render()
}
return render
}

func P2PNodeStats(nodes []p2p.NodeData) string {
/*
<div class="bg-gray-800 p-6 rounded-lg shadow-lg text-left">
<p class="text-xl font-semibold text-gray-200">Total Workers Detected: {{ len .Nodes }}</p>
{{ $online := 0 }}
{{ range .Nodes }}
{{ if .IsOnline }}
{{ $online = add $online 1 }}
{{ end }}
{{ end }}
<p class="text-xl font-semibold text-gray-200">Total Online Workers: {{$online}}</p>
</div>
*/

online := 0
for _, n := range nodes {
if n.IsOnline() {
online++
}
}

class := "text-green-500"
if online == 0 {
class = "text-red-500"
}
/*
<i class="fas fa-circle animate-pulse text-green-500 ml-2 mr-1"></i>
*/
circle := elem.I(attrs.Props{
"class": "fas fa-circle animate-pulse " + class + " ml-2 mr-1",
})
nodesElements := []elem.Node{
elem.Span(
attrs.Props{
"class": class,
},
circle,
elem.Text(fmt.Sprintf("%d", online)),
),
elem.Span(
attrs.Props{
"class": "text-gray-200",
},
elem.Text(fmt.Sprintf("/%d", len(nodes))),
),
}

return renderElements(nodesElements)
}

func P2PNodeBoxes(nodes []p2p.NodeData) string {
/*
<div class="bg-gray-800 p-4 rounded-lg shadow-lg text-left">
<div class="flex items-center mb-2">
<i class="fas fa-desktop text-gray-400 mr-2"></i>
<span class="text-gray-200 font-semibold">{{.ID}}</span>
</div>
<p class="text-sm text-gray-400 mt-2 flex items-center">
Status:
<i class="fas fa-circle {{ if .IsOnline }}text-green-500{{ else }}text-red-500{{ end }} ml-2 mr-1"></i>
<span class="{{ if .IsOnline }}text-green-400{{ else }}text-red-400{{ end }}">
{{ if .IsOnline }}Online{{ else }}Offline{{ end }}
</span>
</p>
</div>
*/

nodesElements := []elem.Node{}

for _, n := range nodes {

nodesElements = append(nodesElements,
elem.Div(
attrs.Props{
"class": "bg-gray-700 p-6 rounded-lg shadow-lg text-left",
},
elem.P(
attrs.Props{
"class": "text-sm text-gray-400 mt-2 flex",
},
elem.I(
attrs.Props{
"class": "fas fa-desktop text-gray-400 mr-2",
},
),
elem.Text("Name: "),
elem.Span(
attrs.Props{
"class": "text-gray-200 font-semibold ml-2 mr-1",
},
elem.Text(bluemonday.StrictPolicy().Sanitize(n.ID)),
),
elem.Text("Status: "),
elem.If(
n.IsOnline(),
elem.I(
attrs.Props{
"class": "fas fa-circle animate-pulse text-green-500 ml-2 mr-1",
},
),
elem.I(
attrs.Props{
"class": "fas fa-circle animate-pulse text-red-500 ml-2 mr-1",
},
),
),
elem.If(
n.IsOnline(),
elem.Span(
attrs.Props{
"class": "text-green-400",
},
elem.Text("Online"),
),
elem.Span(
attrs.Props{
"class": "text-red-400",
},
elem.Text("Offline"),
),
),
),
))
}

return renderElements(nodesElements)
}
89
core/http/elements/progressbar.go
Normal file
@@ -0,0 +1,89 @@
package elements

import (
"github.com/chasefleming/elem-go"
"github.com/chasefleming/elem-go/attrs"
"github.com/microcosm-cc/bluemonday"
)

func DoneProgress(galleryID, text string, showDelete bool) string {
return elem.Div(
attrs.Props{
"id": "action-div-" + dropBadChars(galleryID),
},
elem.H3(
attrs.Props{
"role": "status",
"id": "pblabel",
"tabindex": "-1",
"autofocus": "",
},
elem.Text(bluemonday.StrictPolicy().Sanitize(text)),
),
elem.If(showDelete, deleteButton(galleryID), reInstallButton(galleryID)),
).Render()
}

func ErrorProgress(err, galleryName string) string {
return elem.Div(
attrs.Props{},
elem.H3(
attrs.Props{
"role": "status",
"id": "pblabel",
"tabindex": "-1",
"autofocus": "",
},
elem.Text("Error "+bluemonday.StrictPolicy().Sanitize(err)),
),
installButton(galleryName),
).Render()
}

func ProgressBar(progress string) string {
return elem.Div(attrs.Props{
"class": "progress",
"role": "progressbar",
"aria-valuemin": "0",
"aria-valuemax": "100",
"aria-valuenow": "0",
"aria-labelledby": "pblabel",
},
elem.Div(attrs.Props{
"id": "pb",
"class": "progress-bar",
"style": "width:" + progress + "%",
}),
).Render()
}

func StartProgressBar(uid, progress, text string) string {
if progress == "" {
progress = "0"
}
return elem.Div(
attrs.Props{
"hx-trigger": "done",
"hx-get": "/browse/job/" + uid,
"hx-swap": "outerHTML",
"hx-target": "this",
},
elem.H3(
attrs.Props{
"role": "status",
"id": "pblabel",
"tabindex": "-1",
"autofocus": "",
},
elem.Text(bluemonday.StrictPolicy().Sanitize(text)), // Perhaps overly defensive
elem.Div(attrs.Props{
"hx-get": "/browse/job/progress/" + uid,
"hx-trigger": "every 600ms",
"hx-target": "this",
"hx-swap": "innerHTML",
},
elem.Raw(ProgressBar(progress)),
),
),
).Render()
}
@@ -9,16 +9,19 @@ import (
"github.com/gofiber/fiber/v2"
"github.com/mudler/LocalAI/core/schema"
"github.com/rs/zerolog/log"

"github.com/mudler/LocalAI/pkg/utils"
)

// TTSEndpoint is the OpenAI Speech API endpoint https://platform.openai.com/docs/api-reference/audio/createSpeech
// @Summary Generates audio from the input text.
// @Accept json
// @Produce audio/x-wav
// @Param request body schema.TTSRequest true "query params"
// @Success 200 {string} binary "generated audio/wav file"
// @Router /v1/audio/speech [post]
// @Router /tts [post]
func TTSEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoader, appConfig *config.ApplicationConfig) func(c *fiber.Ctx) error {
return func(c *fiber.Ctx) error {

@@ -67,6 +70,13 @@ func TTSEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoader, appConfi
if err != nil {
return err
}

// Convert generated file to target format
filePath, err = utils.AudioConvert(filePath, input.Format)
if err != nil {
return err
}

return c.Download(filePath)
}
}
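
The new conversion step above delegates to `utils.AudioConvert`, whose body is not shown in this diff. As a rough illustration only, a helper like it could shell out to a locally installed ffmpeg binary along these lines (a hypothetical sketch, not LocalAI's actual implementation):

```go
package utils

import (
	"fmt"
	"os/exec"
	"path/filepath"
	"strings"
)

// audioConvertSketch is a hypothetical stand-in for utils.AudioConvert:
// it converts src (a wav file) to the requested format by invoking ffmpeg
// and returns the path of the converted file. When no conversion is
// needed it returns the input path unchanged.
func audioConvertSketch(src, format string) (string, error) {
	if format == "" || format == "wav" {
		return src, nil // nothing to do, wav is the native output
	}
	dst := strings.TrimSuffix(src, filepath.Ext(src)) + "." + format
	out, err := exec.Command("ffmpeg", "-y", "-i", src, dst).CombinedOutput()
	if err != nil {
		return "", fmt.Errorf("ffmpeg conversion failed: %w (%s)", err, out)
	}
	return dst, nil
}
```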
@@ -136,6 +136,11 @@ func ImageEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoader, appCon
config.Backend = model.StableDiffusionBackend
}

if !strings.Contains(input.Size, "x") {
input.Size = "512x512"
log.Warn().Msgf("Invalid size, using default 512x512")
}

sizeParts := strings.Split(input.Size, "x")
if len(sizeParts) != 2 {
return fmt.Errorf("invalid value for 'size'")
@@ -149,6 +149,10 @@ func updateRequestConfig(config *config.BackendConfig, input *schema.OpenAIReque
// Decode each request's message content
imgIndex, vidIndex, audioIndex := 0, 0, 0
for i, m := range input.Messages {
nrOfImgsInMessage := 0
nrOfVideosInMessage := 0
nrOfAudiosInMessage := 0

switch content := m.Content.(type) {
case string:
input.Messages[i].StringContent = content
@@ -156,11 +160,16 @@ func updateRequestConfig(config *config.BackendConfig, input *schema.OpenAIReque
dat, _ := json.Marshal(content)
c := []schema.Content{}
json.Unmarshal(dat, &c)

textContent := ""
// we will template this at the end

CONTENT:
for _, pp := range c {
switch pp.Type {
case "text":
input.Messages[i].StringContent = pp.Text
textContent += pp.Text
//input.Messages[i].StringContent = pp.Text
case "video", "video_url":
// Decode content as base64 either if it's an URL or base64 text
base64, err := utils.GetContentURIAsBase64(pp.VideoURL.URL)
@@ -169,14 +178,8 @@ func updateRequestConfig(config *config.BackendConfig, input *schema.OpenAIReque
continue CONTENT
}
input.Messages[i].StringVideos = append(input.Messages[i].StringVideos, base64) // TODO: make sure that we only return base64 stuff

t := "[vid-{{.ID}}]{{.Text}}"
if config.TemplateConfig.Video != "" {
t = config.TemplateConfig.Video
}
// set a placeholder for each image
input.Messages[i].StringContent, _ = templates.TemplateMultiModal(t, vidIndex, input.Messages[i].StringContent)
vidIndex++
nrOfVideosInMessage++
case "audio_url", "audio":
// Decode content as base64 either if it's an URL or base64 text
base64, err := utils.GetContentURIAsBase64(pp.AudioURL.URL)
@@ -185,13 +188,8 @@ func updateRequestConfig(config *config.BackendConfig, input *schema.OpenAIReque
continue CONTENT
}
input.Messages[i].StringAudios = append(input.Messages[i].StringAudios, base64) // TODO: make sure that we only return base64 stuff
// set a placeholder for each image
t := "[audio-{{.ID}}]{{.Text}}"
if config.TemplateConfig.Audio != "" {
t = config.TemplateConfig.Audio
}
input.Messages[i].StringContent, _ = templates.TemplateMultiModal(t, audioIndex, input.Messages[i].StringContent)
audioIndex++
nrOfAudiosInMessage++
case "image_url", "image":
// Decode content as base64 either if it's an URL or base64 text
base64, err := utils.GetContentURIAsBase64(pp.ImageURL.URL)
@@ -200,16 +198,21 @@ func updateRequestConfig(config *config.BackendConfig, input *schema.OpenAIReque
continue CONTENT
}

t := "[img-{{.ID}}]{{.Text}}"
if config.TemplateConfig.Image != "" {
t = config.TemplateConfig.Image
}
input.Messages[i].StringImages = append(input.Messages[i].StringImages, base64) // TODO: make sure that we only return base64 stuff
// set a placeholder for each image
input.Messages[i].StringContent, _ = templates.TemplateMultiModal(t, imgIndex, input.Messages[i].StringContent)

imgIndex++
nrOfImgsInMessage++
}
}

input.Messages[i].StringContent, _ = templates.TemplateMultiModal(config.TemplateConfig.Multimodal, templates.MultiModalOptions{
TotalImages: imgIndex,
TotalVideos: vidIndex,
TotalAudios: audioIndex,
ImagesInMessage: nrOfImgsInMessage,
VideosInMessage: nrOfVideosInMessage,
AudiosInMessage: nrOfAudiosInMessage,
}, textContent)
}
}
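
The placeholder strings above (`[img-{{.ID}}]{{.Text}}`, `[vid-{{.ID}}]{{.Text}}`, `[audio-{{.ID}}]{{.Text}}`) are ordinary Go templates. A minimal, self-contained sketch of how such a placeholder expands — using the standard `text/template` package directly rather than LocalAI's `templates` helper — looks like this:

```go
package main

import (
	"os"
	"text/template"
)

// placeholderData mirrors the two fields the default multimodal
// placeholder templates reference.
type placeholderData struct {
	ID   int    // index of the image/video/audio within the request
	Text string // accumulated text content of the message
}

func main() {
	// Same default image placeholder as in the diff above.
	t := template.Must(template.New("img").Parse("[img-{{.ID}}]{{.Text}}"))
	// Prints: [img-0]describe this picture
	_ = t.Execute(os.Stdout, placeholderData{ID: 0, Text: "describe this picture"})
}
```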
@@ -42,7 +42,9 @@ func RegisterLocalAIRoutes(app *fiber.App,
app.Post("/stores/get", localai.StoresGetEndpoint(sl, appConfig))
app.Post("/stores/find", localai.StoresFindEndpoint(sl, appConfig))

app.Get("/metrics", localai.LocalAIMetricsEndpoint())
if !appConfig.DisableMetrics {
app.Get("/metrics", localai.LocalAIMetricsEndpoint())
}

// Experimental Backend Statistics Module
backendMonitorService := services.NewBackendMonitorService(ml, cl, appConfig) // Split out for now
2
core/http/static/assets/flowbite.min.js
vendored
Normal file
File diff suppressed because one or more lines are too long
@@ -1,5 +1,5 @@
<footer class="text-center py-8">
LocalAI Version {{.Version}}<br>
<a href='https://localai.io' class="text-blue-400 hover:text-blue-600" target="_blank">LocalAI</a> © 2023-2024 <a href='https://mudler.pm' class="text-blue-400 hover:text-blue-600" target="_blank">Ettore Di Giacinto</a>
<a href='https://github.com/mudler/LocalAI' class="text-blue-400 hover:text-blue-600" target="_blank">LocalAI</a> © 2023-2024 <a href='https://mudler.pm' class="text-blue-400 hover:text-blue-600" target="_blank">Ettore Di Giacinto</a>
</footer>
<script src="/static/assets/tw-elements.js"></script>
@@ -57,6 +57,7 @@
<link href="/static/assets/fontawesome/css/fontawesome.css" rel="stylesheet" />
<link href="/static/assets/fontawesome/css/brands.css" rel="stylesheet" />
<link href="/static/assets/fontawesome/css/solid.css" rel="stylesheet" />
<script src="/static/assets/flowbite.min.js"></script>
<script src="/static/assets/htmx.js" crossorigin="anonymous"></script>
<!-- P2P Animation START -->
<style>
@@ -118,4 +119,11 @@
100% { transform: rotate(0deg); } /* Return to center */
}
</style>

<!-- https://stackoverflow.com/questions/76051980/flowbite-component-not-working-when-loaded-via-htmx-django-project -->
<script>
htmx.onLoad(function(content) {
initFlowbite();
})
</script>
</head>
@@ -231,10 +231,14 @@ func discoveryTunnels(ctx context.Context, n *node.Node, token, servicesID strin

data := ledger.LastBlock().Storage[servicesID]

zlog.Debug().Any("data", ledger.LastBlock().Storage).Msg("Ledger data")
if logLevel == logLevelDebug {
// We want to surface this debugging data only if p2p logging is set to debug
// (and not generally the whole application, as this can be really noisy)
zlog.Debug().Any("data", ledger.LastBlock().Storage).Msg("Ledger data")
}

for k, v := range data {
zlog.Debug().Msgf("New worker found in the ledger data '%s'", k)
// New worker found in the ledger data as k (worker id)
nd := &NodeData{}
if err := v.Unmarshal(nd); err != nil {
zlog.Error().Msg("cannot unmarshal node data")
@@ -269,7 +273,7 @@ func ensureService(ctx context.Context, n *node.Node, nd *NodeData, sserv string
if ndService, found := service[nd.Name]; !found {
if !nd.IsOnline() {
// if node is offline and not present, do nothing
zlog.Debug().Msgf("Node %s is offline", nd.ID)
// Node nd.ID is offline
return
}

@@ -381,10 +385,6 @@ func newNodeOpts(token string) ([]node.Option, error) {
noDHT := os.Getenv("LOCALAI_P2P_DISABLE_DHT") == "true"
noLimits := os.Getenv("LOCALAI_P2P_ENABLE_LIMITS") == "true"

loglevel := os.Getenv("LOCALAI_P2P_LOGLEVEL")
if loglevel == "" {
loglevel = "info"
}
libp2ploglevel := os.Getenv("LOCALAI_LIBP2P_LOGLEVEL")
if libp2ploglevel == "" {
libp2ploglevel = "fatal"
@@ -396,7 +396,7 @@ func newNodeOpts(token string) ([]node.Option, error) {
},
NetworkToken: token,
LowProfile: false,
LogLevel: loglevel,
LogLevel: logLevel,
Libp2pLogLevel: libp2ploglevel,
Ledger: config.Ledger{
SyncInterval: defaultInterval,
19
core/p2p/p2p_common.go
Normal file
@@ -0,0 +1,19 @@
package p2p

import (
"os"
"strings"
)

var logLevel = strings.ToLower(os.Getenv("LOCALAI_P2P_LOGLEVEL"))

const (
logLevelDebug = "debug"
logLevelInfo = "info"
)

func init() {
if logLevel == "" {
logLevel = logLevelInfo
}
}
@@ -32,6 +32,7 @@ type TTSRequest struct {
Voice string `json:"voice" yaml:"voice"` // voice audio file or speaker id
Backend string `json:"backend" yaml:"backend"`
Language string `json:"language,omitempty" yaml:"language,omitempty"` // (optional) language to use with TTS model
Format string `json:"response_format,omitempty" yaml:"response_format,omitempty"` // (optional) output format
}

type StoresSet struct {
@@ -160,15 +160,10 @@ func Startup(opts ...config.AppOption) (*config.BackendConfigLoader, *model.Mode

log.Debug().Msgf("Auto loading model %s into memory from file: %s", m, cfg.Model)

o := backend.ModelOptions(*cfg, options, []model.Option{})
o := backend.ModelOptions(*cfg, options)

var backendErr error
if cfg.Backend != "" {
o = append(o, model.WithBackendString(cfg.Backend))
_, backendErr = ml.BackendLoader(o...)
} else {
_, backendErr = ml.GreedyLoader(o...)
}
_, backendErr = ml.Load(o...)
if backendErr != nil {
return nil, nil, nil, err
}
@@ -201,3 +201,21 @@ curl -L http://localhost:8080/tts \
"input": "Bonjour, je suis Ana Florence. Comment puis-je vous aider?"
}' | aplay
```

## Response format

To provide some compatibility with the OpenAI API regarding `response_format`, ffmpeg must be installed (or a Docker image including ffmpeg used) to convert the generated wav file before the API returns its response.

Warning regarding a change in behaviour: before this addition, the parameter was ignored and a wav file was always returned, with potential codec errors later in the integration (for example, a client trying to decode an mp3, the default format used by OpenAI, when it actually received a wav file).

Supported formats, thanks to ffmpeg, are `wav`, `mp3`, `aac`, `flac`, and `opus`, defaulting to `wav` if an unknown format or no format is provided.

```bash
curl http://localhost:8080/tts -H "Content-Type: application/json" -d '{
"input": "Hello world",
"model": "tts",
"response_format": "mp3"
}'
```

If a `response_format` other than `wav` is requested and ffmpeg is not available, the call will fail.
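
For reference, the same request can be issued from Go. This is a minimal sketch assuming a LocalAI instance listening on `localhost:8080` and a model configured as `tts`, mirroring the curl call above:

```go
package main

import (
	"bytes"
	"io"
	"log"
	"net/http"
	"os"
)

func main() {
	// Request mp3 output; the server converts the generated wav via ffmpeg.
	body := []byte(`{"input": "Hello world", "model": "tts", "response_format": "mp3"}`)
	resp, err := http.Post("http://localhost:8080/tts", "application/json", bytes.NewReader(body))
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	out, err := os.Create("hello.mp3")
	if err != nil {
		log.Fatal(err)
	}
	defer out.Close()

	// Save the returned audio stream to disk.
	if _, err := io.Copy(out, resp.Body); err != nil {
		log.Fatal(err)
	}
}
```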
@@ -30,6 +30,19 @@ For a full list of options, refer to the [Installer Options]({{% relref "docs/ad

Binaries can also be [manually downloaded]({{% relref "docs/reference/binaries" %}}).

## Using Homebrew on MacOS

{{% alert icon="⚠️" %}}
The Homebrew formula currently doesn't have the same options as the bash script
{{% /alert %}}

You can install Homebrew's [LocalAI](https://formulae.brew.sh/formula/localai) with the following command:

```
brew install localai
```

## Using Container Images or Kubernetes

LocalAI is available as a container image compatible with various container engines such as Docker, Podman, and Kubernetes. Container images are published on [quay.io](https://quay.io/repository/go-skynet/local-ai?tab=tags&tag=latest) and [Docker Hub](https://hub.docker.com/r/localai/localai).
@@ -1,3 +1,3 @@
{
"version": "v2.22.0"
"version": "v2.22.1"
}
133
docs/layouts/partials/docs/top-header.html
Normal file
@@ -0,0 +1,133 @@
<!-- Top Header -->
<div id="top-header" class="top-header d-print-none">
<div class="header-bar d-flex justify-content-between">
<div class="d-flex align-items-center">
<a href='{{ with .Site.Params.docs.logoLinkURL }}{{ . }}{{ else }}{{ relLangURL "" }}{{ end }}' class="logo-icon me-3" aria-label="HomePage" alt="HomePage">
<div class="small">
{{ with resources.Get "images/logos/mark.svg" }}
{{ .Content | safeHTML }}
{{ end }}
</div>
<div class="big">
{{ with resources.Get "images/logos/logo.svg" }}
{{ .Content | safeHTML }}
{{ end }}
</div>
</a>
<button id="close-sidebar" class="btn btn-icon btn-soft">
<span class="material-icons size-20 menu-icon align-middle">menu</span>
</button>
{{ if and (.Site.Params.docsearch.appID) (.Site.Params.docsearch.apiKey) -}}
<span><div id="docsearch"></div></span>
{{ end }}
{{ if or (not (isset .Site.Params.flexsearch "enabled")) (eq .Site.Params.flexsearch.enabled true) -}}
{{ if and (.Site.Params.docsearch.appID) (.Site.Params.docsearch.apiKey) -}}
{{ else }}
<!-- <form class="flexsearch position-relative flex-grow-1 ms-2 me-lg-2 d-none">
<input id="flexsearch" class="form-control is-search" type="search" placeholder="{{ i18n "search_title" }}" aria-label="{{ i18n "search_title" }}" autocomplete="off">
<div id="suggestions" class="shadow bg-white rounded d-none"></div>
</form> -->
<button id="flexsearch-button" class="ms-3 btn btn-soft" data-bs-toggle="collapse" data-bs-target="#FlexSearchCollapse" aria-expanded="false" aria-controls="FlexSearchCollapse">
<span class="material-icons size-20 menu-icon align-middle">search</span>
<span class="flexsearch-button-placeholder ms-1 me-2 d-none d-sm-block">{{ i18n "search_title" }}</span>
<div class="d-none d-sm-block">
<span class="flexsearch-button-keys">
<kbd class="flexsearch-button-cmd-key">
<svg width="44" height="15"><path d="M2.118,11.5A1.519,1.519,0,0,1,1,11.042,1.583,1.583,0,0,1,1,8.815a1.519,1.519,0,0,1,1.113-.458h.715V6.643H2.118A1.519,1.519,0,0,1,1,6.185,1.519,1.519,0,0,1,.547,5.071,1.519,1.519,0,0,1,1,3.958,1.519,1.519,0,0,1,2.118,3.5a1.519,1.519,0,0,1,1.114.458A1.519,1.519,0,0,1,3.69,5.071v.715H5.4V5.071A1.564,1.564,0,0,1,6.976,3.5,1.564,1.564,0,0,1,8.547,5.071,1.564,1.564,0,0,1,6.976,6.643H6.261V8.357h.715a1.575,1.575,0,0,1,1.113,2.685,1.583,1.583,0,0,1-2.227,0A1.519,1.519,0,0,1,5.4,9.929V9.214H3.69v.715a1.519,1.519,0,0,1-.458,1.113A1.519,1.519,0,0,1,2.118,11.5Zm0-.857a.714.714,0,0,0,.715-.714V9.214H2.118a.715.715,0,1,0,0,1.429Zm4.858,0a.715.715,0,1,0,0-1.429H6.261v.715a.714.714,0,0,0,.715.714ZM3.69,8.357H5.4V6.643H3.69ZM2.118,5.786h.715V5.071a.714.714,0,0,0-.715-.714.715.715,0,0,0-.5,1.22A.686.686,0,0,0,2.118,5.786Zm4.143,0h.715a.715.715,0,0,0,.5-1.22.715.715,0,0,0-1.22.5Z" fill="currentColor"></path><path d="M12.4,11.475H11.344l3.879-7.95h1.056Z" fill="currentColor"></path><path d="M25.073,5.384l-.864.576a2.121,2.121,0,0,0-1.786-.923,2.207,2.207,0,0,0-2.266,2.326,2.206,2.206,0,0,0,2.266,2.325,2.1,2.1,0,0,0,1.782-.918l.84.617a3.108,3.108,0,0,1-2.622,1.293,3.217,3.217,0,0,1-3.349-3.317,3.217,3.217,0,0,1,3.349-3.317A3.046,3.046,0,0,1,25.073,5.384Z" fill="currentColor"></path><path d="M30.993,5.142h-2.07v5.419H27.891V5.142h-2.07V4.164h5.172Z" fill="currentColor"></path><path d="M34.67,4.164c1.471,0,2.266.658,2.266,1.851,0,1.087-.832,1.809-2.134,1.855l2.107,2.691h-1.28L33.591,7.87H33.07v2.691H32.038v-6.4Zm-1.6.969v1.8h1.572c.832,0,1.22-.3,1.22-.918s-.411-.882-1.22-.882Z" fill="currentColor"></path><path d="M42.883,10.561H38.31v-6.4h1.033V9.583h3.54Z" fill="currentColor"></path></svg>
</kbd>
<kbd class="flexsearch-button-key">
<svg width="15" height="15"><path d="M5.926,12.279H4.41L9.073,2.721H10.59Z" fill="currentColor"/></svg>
</kbd>
</span>
</div>
</button>
{{ end }}
{{ end -}}
</div>
<div class="d-flex align-items-center m-1">
<h5>Star us on GitHub ! </h5>
<script async defer src="https://buttons.github.io/buttons.js"></script>
<a class="github-button" href="https://github.com/mudler/LocalAI" data-color-scheme="no-preference: light; light: light; dark: dark;" data-icon="octicon-star" data-size="large" data-show-count="true" aria-label="Star mudler/LocalAI on GitHub">Star</a>
</div>
<div class="d-flex align-items-center">
<ul class="list-unstyled mb-0">
{{ with $.Scratch.Get "social_list" }}
{{ range . }}
{{ $path := printf "images/social/%s.%s" . "svg" }}
<li class="list-inline-item mb-0">
<a href="{{ if eq . `rss` }} {{ `index.xml` | absURL }} {{ else }} https://{{ . }}.com/{{ index site.Params.social . }} {{ end }}" alt="{{ . }}" rel="noopener noreferrer" target="_blank">
<div class="btn btn-icon btn-default border-0">
{{ with resources.Get $path }}
{{ .Content | safeHTML }}
{{ end }}
</div>
</a>
</li>
{{ end }}
{{ end }}
</ul>
{{ if eq .Site.Params.docs.darkMode true -}}
<button id="mode" class="btn btn-icon btn-default ms-2" type="button" aria-label="Toggle user interface mode">
<span class="toggle-dark">
<svg xmlns="http://www.w3.org/2000/svg" height="30" width="30" viewBox="0 0 48 48" fill="currentColor">
<title>{{ i18n "enable_dark_mode" | default "Enable dark mode" }}</title>
<path d="M24 42q-7.5 0-12.75-5.25T6 24q0-7.5 5.25-12.75T24 6q.4 0 .85.025.45.025 1.15.075-1.8 1.6-2.8 3.95-1 2.35-1 4.95 0 4.5 3.15 7.65Q28.5 25.8 33 25.8q2.6 0 4.95-.925T41.9 22.3q.05.6.075.975Q42 23.65 42 24q0 7.5-5.25 12.75T24 42Zm0-3q5.45 0 9.5-3.375t5.05-7.925q-1.25.55-2.675.825Q34.45 28.8 33 28.8q-5.75 0-9.775-4.025T19.2 15q0-1.2.25-2.575.25-1.375.9-3.125-4.9 1.35-8.125 5.475Q9 18.9 9 24q0 6.25 4.375 10.625T24 39Zm-.2-14.85Z"/>
</svg>
</span>
<span class="toggle-light">
<svg xmlns="http://www.w3.org/2000/svg" height="30" width="30" viewBox="0 0 48 48" fill="currentColor">
<title>{{ i18n "enable_light_mode" | default "Enable light mode" }}</title>
<path d="M24 31q2.9 0 4.95-2.05Q31 26.9 31 24q0-2.9-2.05-4.95Q26.9 17 24 17q-2.9 0-4.95 2.05Q17 21.1 17 24q0 2.9 2.05 4.95Q21.1 31 24 31Zm0 3q-4.15 0-7.075-2.925T14 24q0-4.15 2.925-7.075T24 14q4.15 0 7.075 2.925T34 24q0 4.15-2.925 7.075T24 34ZM3.5 25.5q-.65 0-1.075-.425Q2 24.65 2 24q0-.65.425-1.075Q2.85 22.5 3.5 22.5h5q.65 0 1.075.425Q10 23.35 10 24q0 .65-.425 1.075-.425.425-1.075.425Zm36 0q-.65 0-1.075-.425Q38 24.65 38 24q0-.65.425-1.075.425-.425 1.075-.425h5q.65 0 1.075.425Q46 23.35 46 24q0 .65-.425 1.075-.425.425-1.075.425ZM24 10q-.65 0-1.075-.425Q22.5 9.15 22.5 8.5v-5q0-.65.425-1.075Q23.35 2 24 2q.65 0 1.075.425.425.425.425 1.075v5q0 .65-.425 1.075Q24.65 10 24 10Zm0 36q-.65 0-1.075-.425-.425-.425-.425-1.075v-5q0-.65.425-1.075Q23.35 38 24 38q.65 0 1.075.425.425.425.425 1.075v5q0 .65-.425 1.075Q24.65 46 24 46ZM12 14.1l-2.85-2.8q-.45-.45-.425-1.075.025-.625.425-1.075.45-.45 1.075-.45t1.075.45L14.1 12q.4.45.4 1.05 0 .6-.4 1-.4.45-1.025.45-.625 0-1.075-.4Zm24.7 24.75L33.9 36q-.4-.45-.4-1.075t.45-1.025q.4-.45 1-.45t1.05.45l2.85 2.8q.45.45.425 1.075-.025.625-.425 1.075-.45.45-1.075.45t-1.075-.45ZM33.9 14.1q-.45-.45-.45-1.05 0-.6.45-1.05l2.8-2.85q.45-.45 1.075-.425.625.025 1.075.425.45.45.45 1.075t-.45 1.075L36 14.1q-.4.4-1.025.4-.625 0-1.075-.4ZM9.15 38.85q-.45-.45-.45-1.075t.45-1.075L12 33.9q.45-.45 1.05-.45.6 0 1.05.45.45.45.45 1.05 0 .6-.45 1.05l-2.8 2.85q-.45.45-1.075.425-.625-.025-1.075-.425ZM24 24Z"/>
</svg>
</span>
</button>
{{ end -}}
{{ if .Site.IsMultiLingual }}
<div class="dropdown">
<button class="btn btn-link btn-default dropdown-toggle ps-2" type="button" data-bs-toggle="dropdown" aria-expanded="false">
{{ site.Language.Lang | upper }}
</button>
<ul class="dropdown-menu text-end">
{{ partial (printf "%s/%s" ($.Scratch.Get "pathName") "i18nlist") . }}
</ul>
</div>
{{ end }}
</div>
</div>
<!-- FlexSearch Input Start -->
{{ if or (not (isset .Site.Params.flexsearch "enabled")) (eq .Site.Params.flexsearch.enabled true) -}}
{{ if and (.Site.Params.docsearch.appID) (.Site.Params.docsearch.apiKey) -}}
{{ else }}
<div class="collapse" id="FlexSearchCollapse">
<div class="flexsearch-container">
<div class="flexsearch-keymap">
<li>
<kbd class="flexsearch-button-cmd-key"><svg width="15" height="15" aria-label="Arrow down" role="img"><g fill="none" stroke="currentColor" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.2"><path d="M7.5 3.5v8M10.5 8.5l-3 3-3-3"></path></g></svg></kbd>
<kbd class="flexsearch-button-cmd-key"><svg width="15" height="15" aria-label="Arrow up" role="img"><g fill="none" stroke="currentColor" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.2"><path d="M7.5 11.5v-8M10.5 6.5l-3-3-3 3"></path></g></svg></kbd>
<span class="flexsearch-key-label">{{ i18n "search_navigate" | default "to navigate" }}</span>
</li>
<li>
<kbd class="flexsearch-button-cmd-key"><svg width="15" height="15" aria-label="Enter key" role="img"><g fill="none" stroke="currentColor" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.2"><path d="M12 3.53088v3c0 1-1 2-2 2H4M7 11.53088l-3-3 3-3"></path></g></svg></kbd>
<span class="flexsearch-key-label">{{ i18n "search_select" | default "to select" }}</span>
</li>
<li>
<kbd class="flexsearch-button-cmd-key"><svg width="15" height="15" aria-label="Escape key" role="img"><g fill="none" stroke="currentColor" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.2"><path d="M13.6167 8.936c-.1065.3583-.6883.962-1.4875.962-.7993 0-1.653-.9165-1.653-2.1258v-.5678c0-1.2548.7896-2.1016 1.653-2.1016.8634 0 1.3601.4778 1.4875 1.0724M9 6c-.1352-.4735-.7506-.9219-1.46-.8972-.7092.0246-1.344.57-1.344 1.2166s.4198.8812 1.3445.9805C8.465 7.3992 8.968 7.9337 9 8.5c.032.5663-.454 1.398-1.4595 1.398C6.6593 9.898 6 9 5.963 8.4851m-1.4748.5368c-.2635.5941-.8099.876-1.5443.876s-1.7073-.6248-1.7073-2.204v-.4603c0-1.0416.721-2.131 1.7073-2.131.9864 0 1.6425 1.031 1.5443 2.2492h-2.956"></path></g></svg></kbd>
<span class="flexsearch-key-label">{{ i18n "search_close" | default "to close" }}</span>
</li>
</div>
<form class="flexsearch position-relative flex-grow-1 ms-2 me-2">
<div class="d-flex flex-row">
<input id="flexsearch" class="form-control" type="search" placeholder="{{ i18n "search_title" }}" aria-label="{{ i18n "search_title" }}" autocomplete="off">
<button id="hideFlexsearch" type="button" class="ms-2 btn btn-soft">
{{ i18n "search_cancel" | default "cancel" }}
</button>
</div>
<div id="suggestions" class="shadow rounded-1 d-none"></div>
</form>
</div>
</div>
{{ end }}
{{ end }}
<!-- FlexSearch Input End -->
</div>
<!-- Top Header -->
2
docs/themes/hugo-theme-relearn
vendored
Submodule docs/themes/hugo-theme-relearn updated: 007cc20686...28fce6b04c
@@ -6,4 +6,4 @@
### For models with an entire YAML file to be embedded, put the file inside the `models`
### directory, it will be automatically available with the file name as key (without the .yaml extension)

phi-2: "github://mudler/LocalAI/examples/configurations/phi-2.yaml@master"
phi-2: "github://mudler/LocalAI-examples/configurations/phi-2.yaml@main"
@@ -56,4 +56,7 @@
sha: "8a9a74f4455f392ec3e7499cfda6097b536bb4b7f1e529a079c3d953c08b54ca"
- filename: "KFOlCnqEu92Fr1MmYUtfBBc9.ttf"
url: "https://fonts.gstatic.com/s/roboto/v30/KFOlCnqEu92Fr1MmYUtfBBc9.ttf"
sha: "361a50f8a6c816ba4306c5290b7e487a726e1b4dcc3d8d7e4acf1fc2dae9f551"
- filename: "flowbite.min.js"
url: "https://cdnjs.cloudflare.com/ajax/libs/flowbite/2.3.0/flowbite.min.js"
sha: "d2a1a72a4c2399e43c01412b86b9957c4df1845f2e0586607c7e55b9ae949cf8"
@@ -1,190 +1,3 @@
# Examples
# LocalAI Examples

| [ChatGPT OSS alternative](https://github.com/go-skynet/LocalAI/tree/master/examples/chatbot-ui) | [Image generation](https://localai.io/api-endpoints/index.html#image-generation) |
|------------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------|
|  |  |

| [Telegram bot](https://github.com/go-skynet/LocalAI/tree/master/examples/telegram-bot) | [Flowise](https://github.com/go-skynet/LocalAI/tree/master/examples/flowise) |
|------------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------|
|  | |

Here is a list of projects that can easily be integrated with the LocalAI backend.

### Projects

### AutoGPT

_by [@mudler](https://github.com/mudler)_

This example shows how to use AutoGPT with LocalAI.

[Check it out here](https://github.com/go-skynet/LocalAI/tree/master/examples/autoGPT/)

### Chatbot-UI

_by [@mkellerman](https://github.com/mkellerman)_



This integration shows how to use LocalAI with [mckaywrigley/chatbot-ui](https://github.com/mckaywrigley/chatbot-ui).

[Check it out here](https://github.com/go-skynet/LocalAI/tree/master/examples/chatbot-ui/)

There is also a separate example to show how to manually set up a model: [example](https://github.com/go-skynet/LocalAI/tree/master/examples/chatbot-ui-manual/)

### K8sGPT

_by [@mudler](https://github.com/mudler)_

This example shows how to use LocalAI inside Kubernetes with [k8sgpt](https://k8sgpt.ai).



### Fine-tuning a model and converting it to gguf to use it with LocalAI

_by [@mudler](https://github.com/mudler)_

This example is an e2e example on how to fine-tune a model with [axolotl](https://github.com/OpenAccess-AI-Collective/axolotl) and convert it to gguf to use it with LocalAI.

[Check it out here](https://github.com/mudler/LocalAI/tree/master/examples/e2e-fine-tuning/)

### Flowise

_by [@mudler](https://github.com/mudler)_

This example shows how to use [FlowiseAI/Flowise](https://github.com/FlowiseAI/Flowise) with LocalAI.

[Check it out here](https://github.com/go-skynet/LocalAI/tree/master/examples/flowise/)

### Discord bot

_by [@mudler](https://github.com/mudler)_

Run a Discord bot which lets you talk directly with a model.

[Check it out here](https://github.com/go-skynet/LocalAI/tree/master/examples/discord-bot/), or for a live demo you can talk with our bot in #random-bot in our discord server.

### Langchain

_by [@dave-gray101](https://github.com/dave-gray101)_

A ready-to-use example showing e2e how to integrate LocalAI with langchain.

[Check it out here](https://github.com/go-skynet/LocalAI/tree/master/examples/langchain/)

### Langchain Python

_by [@mudler](https://github.com/mudler)_

A ready-to-use example showing e2e how to integrate LocalAI with langchain.

[Check it out here](https://github.com/go-skynet/LocalAI/tree/master/examples/langchain-python/)

### LocalAI functions

_by [@mudler](https://github.com/mudler)_

A ready-to-use example showing how to use OpenAI functions with LocalAI.

[Check it out here](https://github.com/go-skynet/LocalAI/tree/master/examples/functions/)

### LocalAI WebUI

_by [@dhruvgera](https://github.com/dhruvgera)_



A light, community-maintained web interface for LocalAI.

[Check it out here](https://github.com/go-skynet/LocalAI/tree/master/examples/localai-webui/)

### How to run rwkv models

_by [@mudler](https://github.com/mudler)_

A full example on how to run RWKV models with LocalAI.

[Check it out here](https://github.com/go-skynet/LocalAI/tree/master/examples/rwkv/)

### PrivateGPT

_by [@mudler](https://github.com/mudler)_

A full example on how to run PrivateGPT with LocalAI.

[Check it out here](https://github.com/go-skynet/LocalAI/tree/master/examples/privateGPT/)

### Slack bot

_by [@mudler](https://github.com/mudler)_

Run a Slack bot which lets you talk directly with a model.

[Check it out here](https://github.com/go-skynet/LocalAI/tree/master/examples/slack-bot/)

### Slack bot (Question answering)

_by [@mudler](https://github.com/mudler)_

Run a Slack bot, ideally for teams, which lets you ask questions on a documentation website or a GitHub repository.

[Check it out here](https://github.com/go-skynet/LocalAI/tree/master/examples/slack-qa-bot/)

### Question answering on documents with llama-index

_by [@mudler](https://github.com/mudler)_

Shows how to integrate with [Llama-Index](https://gpt-index.readthedocs.io/en/stable/getting_started/installation.html) to enable question answering on a set of documents.

[Check it out here](https://github.com/go-skynet/LocalAI/tree/master/examples/query_data/)

### Question answering on documents with langchain and chroma

_by [@mudler](https://github.com/mudler)_

Shows how to integrate with `Langchain` and `Chroma` to enable question answering on a set of documents.

[Check it out here](https://github.com/go-skynet/LocalAI/tree/master/examples/langchain-chroma/)

### Telegram bot

_by [@mudler](https://github.com/mudler)_



Use LocalAI to power a Telegram bot assistant, with image generation and audio support!

[Check it out here](https://github.com/go-skynet/LocalAI/tree/master/examples/telegram-bot/)

### Template for Runpod.io

_by [@fHachenberg](https://github.com/fHachenberg)_

Allows running any LocalAI-compatible model as a backend on the servers of https://runpod.io

[Check it out here](https://runpod.io/gsc?template=uv9mtqnrd0&ref=984wlcra)

### Continue

_by [@gruberdev](https://github.com/gruberdev)_

<img src="continue/img/screen.png" width="600" height="200" alt="Screenshot">

Demonstrates how to integrate an open-source copilot alternative that enhances code analysis, completion, and improvements. This approach seamlessly integrates with any LocalAI model, offering a more user-friendly experience.

[Check it out here](https://github.com/go-skynet/LocalAI/tree/master/examples/continue/)

### Streamlit bot

_by [@majoshi1](https://github.com/majoshi1)_



A chat bot made using `Streamlit` & LocalAI.

[Check it out here](https://github.com/go-skynet/LocalAI/tree/master/examples/streamlit-bot/)

## Want to contribute?

Create an issue, and put `Example: <description>` in the title! We will post your examples here.
LocalAI examples were moved to a dedicated repository: https://github.com/mudler/LocalAI-examples
@@ -1,9 +0,0 @@
# CPU .env docs: https://localai.io/howtos/easy-setup-docker-cpu/
# GPU .env docs: https://localai.io/howtos/easy-setup-docker-gpu/

OPENAI_API_KEY=sk---anystringhere
OPENAI_API_BASE=http://api:8080/v1
# Models to preload at start
# Here we configure gpt4all as gpt-3.5-turbo and bert as embeddings,
# see other options in the model gallery at https://github.com/go-skynet/model-gallery
PRELOAD_MODELS=[{"url": "github:go-skynet/model-gallery/gpt4all-j.yaml", "name": "gpt-3.5-turbo"}, { "url": "github:go-skynet/model-gallery/bert-embeddings.yaml", "name": "text-embedding-ada-002"}]
@@ -1,36 +0,0 @@
# AutoGPT

Example of integration with [AutoGPT](https://github.com/Significant-Gravitas/Auto-GPT).

## Run

```bash
# Clone LocalAI
git clone https://github.com/go-skynet/LocalAI

cd LocalAI/examples/autoGPT

cp -rfv .env.example .env

# Edit the .env file to set a different model by editing `PRELOAD_MODELS`.
vim .env

docker-compose run --rm auto-gpt
```

Note: The example automatically downloads the `gpt4all` model as it is under a permissive license. The GPT4All model does not seem to be enough to run AutoGPT. WizardLM-7b-uncensored seems to perform better (with `f16: true`).

## Without docker

Run AutoGPT with `OPENAI_API_BASE` pointing to the LocalAI endpoint. If you run it locally for instance:

```
OPENAI_API_BASE=http://localhost:8080 python ...
```

Note: you need a model named `gpt-3.5-turbo` and `text-embedding-ada-002`. You can preload those in LocalAI at start by setting in the env:

```
PRELOAD_MODELS=[{"url": "github:go-skynet/model-gallery/gpt4all-j.yaml", "name": "gpt-3.5-turbo"}, { "url": "github:go-skynet/model-gallery/bert-embeddings.yaml", "name": "text-embedding-ada-002"}]
```
@@ -1,42 +0,0 @@
version: "3.9"
services:
api:
image: quay.io/go-skynet/local-ai:latest
ports:
- 8080:8080
env_file:
- .env
environment:
- DEBUG=true
- MODELS_PATH=/models
volumes:
- ./models:/models:cached
command: ["/usr/bin/local-ai" ]
auto-gpt:
image: significantgravitas/auto-gpt
depends_on:
api:
condition: service_healthy
redis:
condition: service_started
env_file:
- .env
environment:
MEMORY_BACKEND: ${MEMORY_BACKEND:-redis}
REDIS_HOST: ${REDIS_HOST:-redis}
profiles: ["exclude-from-up"]
volumes:
- ./auto_gpt_workspace:/app/autogpt/auto_gpt_workspace
- ./data:/app/data
## allow auto-gpt to write logs to disk
- ./logs:/app/logs
## uncomment following lines if you want to make use of these files
## you must have them existing in the same folder as this docker-compose.yml
#- type: bind
# source: ./azure.yaml
# target: /app/azure.yaml
#- type: bind
# source: ./ai_settings.yaml
# target: /app/ai_settings.yaml
redis:
image: "redis/redis-stack-server:latest"
@@ -1,25 +0,0 @@
# Use an official Python runtime as a parent image
FROM python:3.12-slim

# Set the working directory in the container
WORKDIR /app

# Copy the current directory contents into the container at /app
COPY requirements.txt /app

# Install c++ compiler
RUN apt-get update \
&& DEBIAN_FRONTEND=noninteractive apt-get install -y build-essential \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*

# Install any needed packages specified in requirements.txt
RUN pip install --no-cache-dir -r requirements.txt \
&& DEBIAN_FRONTEND=noninteractive apt-get remove -y build-essential \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*

COPY . /app

# Run app.py when the container launches
CMD ["chainlit", "run", "-h", "--host", "0.0.0.0", "main.py" ]
@@ -1,25 +0,0 @@
# LocalAI Demonstration with Embeddings and Chainlit

This demonstration shows you how to use embeddings with existing data in `LocalAI`, and how to integrate it with Chainlit for an interactive querying experience. We are using the `llama_index` library to facilitate the embedding and querying processes, and `chainlit` to provide an interactive interface. The `Weaviate` client is used as the embedding source.

## Prerequisites

Before proceeding, make sure you have the following installed:
- Weaviate client
- LocalAI and its dependencies
- Chainlit and its dependencies

## Getting Started

1. Clone this repository:
2. Navigate to the project directory:
3. Run the example: `chainlit run main.py`

# Highlight on `llama_index` and `chainlit`

`llama_index` is the key library that facilitates the process of embedding and querying data in LocalAI. It provides a seamless interface to integrate various components, such as `WeaviateVectorStore`, `LocalAI`, `ServiceContext`, and more, for a smooth querying experience.

`chainlit` is used to provide an interactive interface for users to query the data and see the results in real-time. It integrates with llama_index to handle the querying process and display the results to the user.

In this example, `llama_index` is used to set up the `VectorStoreIndex` and `QueryEngine`, and `chainlit` is used to handle the user interactions with `LocalAI` and display the results.
Some files were not shown because too many files have changed in this diff.