Compare commits


11 Commits

Author SHA1 Message Date
fakezeta
c9451cb604 Bump oneapi-basekit, optimum and openvino (#2139)
* Bump oneapi-basekit, optimum and openvino

* Changed PERFORMANCE_HINT to CUMULATIVE_THROUGHPUT

Minor latency change for the first token, but roughly a 10-15% speedup on token generation.
2024-04-26 16:20:43 +02:00
Dave
006306b183 fix: use bluemonday as recommended by blackfriday (#2142)
use bluemonday as recommended by blackfriday

Signed-off-by: Dave Lee <dave@gray101.com>
2024-04-26 10:34:50 +02:00
Dave
2cd4936c99 fix: security scanner warning noise: error handlers part 1 (#2141)
first group of error handlers to reduce security scanner warning noise level

Signed-off-by: Dave Lee <dave@gray101.com>
2024-04-26 10:34:31 +02:00
Dave
44bc540bb5 fix: security scanner dislikes runCommand function arguments (#2140)
runCommand ==> ffmpegCommand. No functional changes, but this makes it clear to the security scanner and to future developers that this function cannot run arbitrary commands.

Signed-off-by: Dave Lee <dave@gray101.com>
2024-04-26 10:33:12 +02:00
Ettore Di Giacinto
6b411ae212 models(gallery): add variants of llama3 70b (#2138)
Signed-off-by: Ettore Di Giacinto <mudler@users.noreply.github.com>
2024-04-26 00:48:06 +02:00
Dave
eed285f9de fix: update langchainjs (#2136)
quick update of the langchainjs example to quiet down some dependency security scanner noise

Signed-off-by: Dave Lee <dave@gray101.com>
2024-04-26 00:47:35 +02:00
Dave
c8dd8e5ef4 fix: reduce chmod permissions for created files and directories (#2137)
quiet more security scanner issues: pass one of chmod restrictions, removing group and other permissions

Signed-off-by: Dave Lee <dave@gray101.com>
2024-04-26 00:47:06 +02:00
LocalAI [bot]
365ef92530 ⬆️ Update mudler/go-stable-diffusion (#2134)
Signed-off-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: mudler <2420543+mudler@users.noreply.github.com>
2024-04-25 21:41:38 +00:00
LocalAI [bot]
5fceb876c4 ⬆️ Update ggerganov/llama.cpp (#2133)
Signed-off-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: mudler <2420543+mudler@users.noreply.github.com>
2024-04-25 21:40:41 +00:00
cryptk
d98063e80e fix: api key polling was not using correct filepath (#2132)
Signed-off-by: Chris Jowett <421501+cryptk@users.noreply.github.com>
2024-04-25 20:06:22 +00:00
Dave
45761f8be2 fix: yamllint warnings and errors (#2131)
fix yamllint warnings and errors

Signed-off-by: Dave Lee <dave@gray101.com>
2024-04-25 17:25:56 +00:00
56 changed files with 1880 additions and 653 deletions

View File

@@ -68,7 +68,7 @@ jobs:
- build-type: 'sycl_f16'
platforms: 'linux/amd64'
tag-latest: 'false'
base-image: "intel/oneapi-basekit:2024.0.1-devel-ubuntu22.04"
base-image: "intel/oneapi-basekit:2024.1.0-devel-ubuntu22.04"
grpc-base-image: "ubuntu:22.04"
tag-suffix: 'sycl-f16-ffmpeg'
ffmpeg: 'true'
@@ -110,7 +110,7 @@ jobs:
- build-type: 'sycl_f16'
platforms: 'linux/amd64'
tag-latest: 'false'
base-image: "intel/oneapi-basekit:2024.0.1-devel-ubuntu22.04"
base-image: "intel/oneapi-basekit:2024.1.0-devel-ubuntu22.04"
grpc-base-image: "ubuntu:22.04"
tag-suffix: 'sycl-f16-ffmpeg-core'
ffmpeg: 'true'

View File

@@ -148,7 +148,7 @@ jobs:
- build-type: 'sycl_f16'
platforms: 'linux/amd64'
tag-latest: 'auto'
base-image: "intel/oneapi-basekit:2024.0.1-devel-ubuntu22.04"
base-image: "intel/oneapi-basekit:2024.1.0-devel-ubuntu22.04"
grpc-base-image: "ubuntu:22.04"
tag-suffix: '-sycl-f16-ffmpeg'
ffmpeg: 'true'
@@ -161,7 +161,7 @@ jobs:
- build-type: 'sycl_f32'
platforms: 'linux/amd64'
tag-latest: 'auto'
base-image: "intel/oneapi-basekit:2024.0.1-devel-ubuntu22.04"
base-image: "intel/oneapi-basekit:2024.1.0-devel-ubuntu22.04"
grpc-base-image: "ubuntu:22.04"
tag-suffix: '-sycl-f32-ffmpeg'
ffmpeg: 'true'
@@ -175,7 +175,7 @@ jobs:
- build-type: 'sycl_f16'
platforms: 'linux/amd64'
tag-latest: 'false'
base-image: "intel/oneapi-basekit:2024.0.1-devel-ubuntu22.04"
base-image: "intel/oneapi-basekit:2024.1.0-devel-ubuntu22.04"
grpc-base-image: "ubuntu:22.04"
tag-suffix: '-sycl-f16-core'
ffmpeg: 'false'
@@ -185,7 +185,7 @@ jobs:
- build-type: 'sycl_f32'
platforms: 'linux/amd64'
tag-latest: 'false'
base-image: "intel/oneapi-basekit:2024.0.1-devel-ubuntu22.04"
base-image: "intel/oneapi-basekit:2024.1.0-devel-ubuntu22.04"
grpc-base-image: "ubuntu:22.04"
tag-suffix: '-sycl-f32-core'
ffmpeg: 'false'
@@ -195,7 +195,7 @@ jobs:
- build-type: 'sycl_f16'
platforms: 'linux/amd64'
tag-latest: 'false'
base-image: "intel/oneapi-basekit:2024.0.1-devel-ubuntu22.04"
base-image: "intel/oneapi-basekit:2024.1.0-devel-ubuntu22.04"
grpc-base-image: "ubuntu:22.04"
tag-suffix: '-sycl-f16-ffmpeg-core'
ffmpeg: 'true'
@@ -205,7 +205,7 @@ jobs:
- build-type: 'sycl_f32'
platforms: 'linux/amd64'
tag-latest: 'false'
base-image: "intel/oneapi-basekit:2024.0.1-devel-ubuntu22.04"
base-image: "intel/oneapi-basekit:2024.1.0-devel-ubuntu22.04"
grpc-base-image: "ubuntu:22.04"
tag-suffix: '-sycl-f32-ffmpeg-core'
ffmpeg: 'true'

.yamllint Normal file (4 changed lines)
View File

@@ -0,0 +1,4 @@
extends: default
rules:
  line-length: disable

View File

@@ -5,7 +5,7 @@ BINARY_NAME=local-ai
# llama.cpp versions
GOLLAMA_STABLE_VERSION?=2b57a8ae43e4699d3dc5d1496a1ccd42922993be
CPPLLAMA_VERSION?=784e11dea1f5ce9638851b2b0dddb107e2a609c8
CPPLLAMA_VERSION?=46e12c4692a37bdd31a0432fc5153d7d22bc7f72
# gpt4all version
GPT4ALL_REPO?=https://github.com/nomic-ai/gpt4all
@@ -25,7 +25,7 @@ BERT_VERSION?=6abe312cded14042f6b7c3cd8edf082713334a4d
PIPER_VERSION?=9d0100873a7dbb0824dfea40e8cec70a1b110759
# stablediffusion version
STABLEDIFFUSION_VERSION?=362df9da29f882dbf09ade61972d16a1f53c3485
STABLEDIFFUSION_VERSION?=433ea6d9b64d9d08067324a757ef07040ea29568
# tinydream version
TINYDREAM_VERSION?=22a12a4bc0ac5455856f28f3b771331a551a4293
@@ -707,7 +707,7 @@ docker-aio-all:
docker-image-intel:
docker build \
--build-arg BASE_IMAGE=intel/oneapi-basekit:2024.0.1-devel-ubuntu22.04 \
--build-arg BASE_IMAGE=intel/oneapi-basekit:2024.1.0-devel-ubuntu22.04 \
--build-arg IMAGE_TYPE=$(IMAGE_TYPE) \
--build-arg GO_TAGS="none" \
--build-arg MAKEFLAGS="$(DOCKER_MAKEFLAGS)" \
@@ -715,7 +715,7 @@ docker-image-intel:
docker-image-intel-xpu:
docker build \
--build-arg BASE_IMAGE=intel/oneapi-basekit:2024.0.1-devel-ubuntu22.04 \
--build-arg BASE_IMAGE=intel/oneapi-basekit:2024.1.0-devel-ubuntu22.04 \
--build-arg IMAGE_TYPE=$(IMAGE_TYPE) \
--build-arg GO_TAGS="none" \
--build-arg MAKEFLAGS="$(DOCKER_MAKEFLAGS)" \

View File

@@ -11,8 +11,8 @@ import (
"github.com/go-skynet/LocalAI/core/schema"
)
func runCommand(command []string) (string, error) {
cmd := exec.Command(command[0], command[1:]...)
func ffmpegCommand(args []string) (string, error) {
cmd := exec.Command("ffmpeg", args...) // Constrain this to ffmpeg to permit security scanner to see that the command is safe.
cmd.Env = os.Environ()
out, err := cmd.CombinedOutput()
return string(out), err
@@ -21,8 +21,8 @@ func runCommand(command []string) (string, error) {
// AudioToWav converts audio to wav for transcribe.
// TODO: use https://github.com/mccoyst/ogg?
func audioToWav(src, dst string) error {
command := []string{"ffmpeg", "-i", src, "-format", "s16le", "-ar", "16000", "-ac", "1", "-acodec", "pcm_s16le", dst}
out, err := runCommand(command)
commandArgs := []string{"-i", src, "-format", "s16le", "-ar", "16000", "-ac", "1", "-acodec", "pcm_s16le", dst}
out, err := ffmpegCommand(commandArgs)
if err != nil {
return fmt.Errorf("error: %w out: %s", err, out)
}
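
For reference, a minimal standalone sketch of the pattern above: the binary name is hard-coded so callers can only influence the arguments, never which program is executed. It assumes ffmpeg is on PATH and is not the project's actual code.

package main

import (
	"fmt"
	"os"
	"os/exec"
)

// ffmpegCommand runs ffmpeg with the given arguments. Hard-coding the binary
// name is what signals to the security scanner (and to readers) that arbitrary
// commands cannot be executed through this helper.
func ffmpegCommand(args []string) (string, error) {
	cmd := exec.Command("ffmpeg", args...)
	cmd.Env = os.Environ()
	out, err := cmd.CombinedOutput()
	return string(out), err
}

func main() {
	// Example call: print the installed ffmpeg version (assumes ffmpeg is on PATH).
	out, err := ffmpegCommand([]string{"-version"})
	if err != nil {
		fmt.Println("ffmpeg failed:", err)
		return
	}
	fmt.Println(out)
}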

View File

@@ -60,9 +60,10 @@ dependencies:
- networkx
- numpy==1.26.0
- onnx==1.15.0
- openvino==2024.0.0
- openvino-telemetry==2023.2.1
- optimum[openvino]==1.17.1
- openvino==2024.1.0
- openvino-telemetry==2024.1.0
- optimum[openvino]==1.19.1
- optimum-intel==1.16.1
- packaging==23.2
- pandas
- peft==0.5.0

View File

@@ -150,7 +150,7 @@ class BackendServicer(backend_pb2_grpc.BackendServicer):
self.model = OVModelForCausalLM.from_pretrained(model_name,
compile=True,
trust_remote_code=request.TrustRemoteCode,
ov_config={"PERFORMANCE_HINT": "LATENCY"},
ov_config={"PERFORMANCE_HINT": "CUMULATIVE_THROUGHPUT"},
device=device_map)
self.OV = True
else:

View File

@@ -7,7 +7,8 @@ import (
"github.com/go-skynet/LocalAI/core/config"
pb "github.com/go-skynet/LocalAI/pkg/grpc/proto"
model "github.com/go-skynet/LocalAI/pkg/model"
"github.com/go-skynet/LocalAI/pkg/model"
"github.com/rs/zerolog/log"
)
func modelOpts(c config.BackendConfig, so *config.ApplicationConfig, opts []model.Option) []model.Option {
@@ -109,8 +110,12 @@ func gRPCPredictOpts(c config.BackendConfig, modelPath string) *pb.PredictOption
promptCachePath := ""
if c.PromptCachePath != "" {
p := filepath.Join(modelPath, c.PromptCachePath)
os.MkdirAll(filepath.Dir(p), 0755)
promptCachePath = p
err := os.MkdirAll(filepath.Dir(p), 0750)
if err == nil {
promptCachePath = p
} else {
log.Error().Err(err).Str("promptCachePath", promptCachePath).Msg("error creating prompt cache folder")
}
}
return &pb.PredictOptions{
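
A standalone restatement of the hunk above, using a hypothetical promptCachePath helper: the path is only handed back if the directory could actually be created; otherwise the error is logged and the cache is skipped.

package main

import (
	"os"
	"path/filepath"

	"github.com/rs/zerolog/log"
)

// promptCachePath is a hypothetical helper mirroring the logic in the diff:
// create the parent directory with restricted permissions and only return
// the path if that succeeded.
func promptCachePath(modelPath, configured string) string {
	if configured == "" {
		return ""
	}
	p := filepath.Join(modelPath, configured)
	if err := os.MkdirAll(filepath.Dir(p), 0750); err != nil {
		log.Error().Err(err).Str("promptCachePath", p).Msg("error creating prompt cache folder")
		return "" // fall back to "no prompt cache"
	}
	return p
}

func main() {
	log.Info().Str("path", promptCachePath(os.TempDir(), "cache/prompt")).Msg("resolved prompt cache path")
}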

View File

@@ -53,7 +53,7 @@ func ModelTTS(backend, text, modelFile, voice string, loader *model.ModelLoader,
return "", nil, fmt.Errorf("could not load piper model")
}
if err := os.MkdirAll(appConfig.AudioDir, 0755); err != nil {
if err := os.MkdirAll(appConfig.AudioDir, 0750); err != nil {
return "", nil, fmt.Errorf("failed creating audio directory: %s", err)
}

View File

@@ -175,11 +175,11 @@ func App(cl *config.BackendConfigLoader, ml *model.ModelLoader, appConfig *confi
}
// Make sure directories exists
os.MkdirAll(appConfig.ImageDir, 0755)
os.MkdirAll(appConfig.AudioDir, 0755)
os.MkdirAll(appConfig.UploadDir, 0755)
os.MkdirAll(appConfig.ConfigsDir, 0755)
os.MkdirAll(appConfig.ModelPath, 0755)
os.MkdirAll(appConfig.ImageDir, 0750)
os.MkdirAll(appConfig.AudioDir, 0750)
os.MkdirAll(appConfig.UploadDir, 0750)
os.MkdirAll(appConfig.ConfigsDir, 0750)
os.MkdirAll(appConfig.ModelPath, 0750)
// Load config jsons
utils.LoadConfig(appConfig.UploadDir, openai.UploadedFilesFile, &openai.UploadedFiles)
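
The permission values themselves are the whole change in this hunk. A small sketch of what they mean in practice, using throwaway paths rather than the application's real directories:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	base, err := os.MkdirTemp("", "perm-demo")
	if err != nil {
		fmt.Println("mkdtemp failed:", err)
		return
	}
	defer os.RemoveAll(base)

	// 0750: owner rwx, group r-x, nothing for others (0755 also granted
	// read/execute to "other", which is what the scanner flagged).
	dir := filepath.Join(base, "audio")
	if err := os.MkdirAll(dir, 0750); err != nil {
		fmt.Println("mkdir failed:", err)
		return
	}

	// 0600 for files: owner read/write only (0644 previously allowed group
	// and other to read).
	file := filepath.Join(dir, "example.yaml")
	if err := os.WriteFile(file, []byte("backend: piper\n"), 0600); err != nil {
		fmt.Println("write failed:", err)
		return
	}

	dirInfo, _ := os.Stat(dir)
	fileInfo, _ := os.Stat(file)
	fmt.Printf("%s -> %v\n%s -> %v\n", dir, dirInfo.Mode().Perm(), file, fileInfo.Mode().Perm())
}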

View File

@@ -222,7 +222,7 @@ var _ = Describe("API test", func() {
modelDir = filepath.Join(tmpdir, "models")
backendAssetsDir := filepath.Join(tmpdir, "backend-assets")
err = os.Mkdir(backendAssetsDir, 0755)
err = os.Mkdir(backendAssetsDir, 0750)
Expect(err).ToNot(HaveOccurred())
c, cancel = context.WithCancel(context.Background())
@@ -241,7 +241,7 @@ var _ = Describe("API test", func() {
}
out, err := yaml.Marshal(g)
Expect(err).ToNot(HaveOccurred())
err = os.WriteFile(filepath.Join(tmpdir, "gallery_simple.yaml"), out, 0644)
err = os.WriteFile(filepath.Join(tmpdir, "gallery_simple.yaml"), out, 0600)
Expect(err).ToNot(HaveOccurred())
galleries := []gallery.Gallery{
@@ -595,7 +595,7 @@ var _ = Describe("API test", func() {
Expect(err).ToNot(HaveOccurred())
modelDir = filepath.Join(tmpdir, "models")
backendAssetsDir := filepath.Join(tmpdir, "backend-assets")
err = os.Mkdir(backendAssetsDir, 0755)
err = os.Mkdir(backendAssetsDir, 0750)
Expect(err).ToNot(HaveOccurred())
c, cancel = context.WithCancel(context.Background())

View File

@@ -3,10 +3,6 @@ package openai
import (
"encoding/json"
"fmt"
"github.com/go-skynet/LocalAI/core/config"
"github.com/go-skynet/LocalAI/pkg/model"
"github.com/gofiber/fiber/v2"
"github.com/stretchr/testify/assert"
"io"
"io/ioutil"
"net/http"
@@ -16,6 +12,11 @@ import (
"strings"
"testing"
"time"
"github.com/go-skynet/LocalAI/core/config"
"github.com/go-skynet/LocalAI/pkg/model"
"github.com/gofiber/fiber/v2"
"github.com/stretchr/testify/assert"
)
var configsDir string = "/tmp/localai/configs"
@@ -49,8 +50,8 @@ func TestAssistantEndpoints(t *testing.T) {
}
_ = os.RemoveAll(appConfig.ConfigsDir)
_ = os.MkdirAll(appConfig.ConfigsDir, 0755)
_ = os.MkdirAll(modelPath, 0755)
_ = os.MkdirAll(appConfig.ConfigsDir, 0750)
_ = os.MkdirAll(modelPath, 0750)
os.Create(filepath.Join(modelPath, "ggml-gpt4all-j"))
app := fiber.New(fiber.Config{

View File

@@ -251,7 +251,7 @@ func newMultipartFile(filePath, tag, purpose string) (*strings.Reader, *multipar
// Helper to create test files
func createTestFile(t *testing.T, name string, sizeMB int, option *config.ApplicationConfig) *os.File {
err := os.MkdirAll(option.UploadDir, 0755)
err := os.MkdirAll(option.UploadDir, 0750)
if err != nil {
t.Fatalf("Error MKDIR: %v", err)

View File

@@ -10,6 +10,7 @@ import (
"github.com/go-skynet/LocalAI/core/schema"
"github.com/gofiber/fiber/v2"
fiberhtml "github.com/gofiber/template/html/v2"
"github.com/microcosm-cc/bluemonday"
"github.com/russross/blackfriday"
)
@@ -39,5 +40,5 @@ func renderEngine() *fiberhtml.Engine {
func markDowner(args ...interface{}) template.HTML {
s := blackfriday.MarkdownCommon([]byte(fmt.Sprintf("%s", args...)))
return template.HTML(s)
return template.HTML(bluemonday.UGCPolicy().Sanitize(string(s)))
}
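
A minimal sketch of the render-then-sanitize pattern this diff adopts, using only the two libraries already imported above; the input string is made up for illustration.

package main

import (
	"fmt"

	"github.com/microcosm-cc/bluemonday"
	"github.com/russross/blackfriday"
)

func main() {
	input := []byte("**hello** <script>alert(1)</script>")

	// blackfriday renders Markdown to HTML but performs no sanitization;
	// its documentation recommends passing the output through bluemonday.
	unsafe := blackfriday.MarkdownCommon(input)

	// UGCPolicy keeps common formatting tags and strips scripts and
	// dangerous attributes.
	safe := bluemonday.UGCPolicy().SanitizeBytes(unsafe)

	fmt.Println(string(safe))
}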

View File

@@ -5,6 +5,7 @@ import (
"fmt"
"os"
"path"
"path/filepath"
"time"
"github.com/fsnotify/fsnotify"
@@ -50,6 +51,7 @@ func (c *configFileHandler) Register(filename string, handler fileHandler, runNo
}
func (c *configFileHandler) callHandler(filename string, handler fileHandler) {
log.Trace().Str("filename", filename).Msg("reading file for dynamic config update")
fileContent, err := os.ReadFile(filename)
if err != nil && !os.IsNotExist(err) {
log.Error().Err(err).Str("filename", filename).Msg("could not read file")
@@ -75,7 +77,7 @@ func (c *configFileHandler) Watch() error {
<-ticker.C
for file, handler := range c.handlers {
log.Debug().Str("file", file).Msg("polling config file")
c.callHandler(file, handler)
c.callHandler(filepath.Join(c.appConfig.DynamicConfigsDir, file), handler)
}
}
}()
@@ -122,7 +124,8 @@ func (c *configFileHandler) Stop() {
func readApiKeysJson(startupAppConfig config.ApplicationConfig) fileHandler {
handler := func(fileContent []byte, appConfig *config.ApplicationConfig) error {
log.Debug().Msg("processing api_keys.json")
log.Debug().Msg("processing api keys runtime update")
log.Trace().Int("numKeys", len(startupAppConfig.ApiKeys)).Msg("api keys provided at startup")
if len(fileContent) > 0 {
// Parse JSON content from the file
@@ -132,11 +135,14 @@ func readApiKeysJson(startupAppConfig config.ApplicationConfig) fileHandler {
return err
}
log.Trace().Int("numKeys", len(fileKeys)).Msg("discovered API keys from api keys dynamic config dile")
appConfig.ApiKeys = append(startupAppConfig.ApiKeys, fileKeys...)
} else {
log.Trace().Msg("no API keys discovered from dynamic config file")
appConfig.ApiKeys = startupAppConfig.ApiKeys
}
log.Debug().Msg("api keys loaded from api_keys.json")
log.Trace().Int("numKeys", len(appConfig.ApiKeys)).Msg("total api keys after processing")
return nil
}
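
A stripped-down sketch of the polling fix in the first hunk above: the handler map is keyed by bare filenames, so the watch loop must join each name with the dynamic-config directory before reading. The names below (pollConfigFiles, the directory path) are hypothetical.

package main

import (
	"fmt"
	"path/filepath"
	"time"
)

// pollConfigFiles re-invokes each registered handler on a timer. The fix:
// join the configured directory with the registered filename instead of
// passing the bare filename through.
func pollConfigFiles(configDir string, handlers map[string]func(path string)) {
	ticker := time.NewTicker(1 * time.Second)
	go func() {
		for range ticker.C {
			for file, handler := range handlers {
				handler(filepath.Join(configDir, file)) // previously just `file`
			}
		}
	}()
}

func main() {
	handlers := map[string]func(string){
		"api_keys.json": func(path string) { fmt.Println("would reload", path) },
	}
	pollConfigFiles("/tmp/localai/dynamic", handlers)
	time.Sleep(3 * time.Second)
}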

View File

@@ -23,24 +23,24 @@ func Startup(opts ...config.AppOption) (*config.BackendConfigLoader, *model.Mode
if options.ModelPath == "" {
return nil, nil, nil, fmt.Errorf("options.ModelPath cannot be empty")
}
err := os.MkdirAll(options.ModelPath, 0755)
err := os.MkdirAll(options.ModelPath, 0750)
if err != nil {
return nil, nil, nil, fmt.Errorf("unable to create ModelPath: %q", err)
}
if options.ImageDir != "" {
err := os.MkdirAll(options.ImageDir, 0755)
err := os.MkdirAll(options.ImageDir, 0750)
if err != nil {
return nil, nil, nil, fmt.Errorf("unable to create ImageDir: %q", err)
}
}
if options.AudioDir != "" {
err := os.MkdirAll(options.AudioDir, 0755)
err := os.MkdirAll(options.AudioDir, 0750)
if err != nil {
return nil, nil, nil, fmt.Errorf("unable to create AudioDir: %q", err)
}
}
if options.UploadDir != "" {
err := os.MkdirAll(options.UploadDir, 0755)
err := os.MkdirAll(options.UploadDir, 0750)
if err != nil {
return nil, nil, nil, fmt.Errorf("unable to create UploadDir: %q", err)
}
@@ -122,7 +122,10 @@ func Startup(opts ...config.AppOption) (*config.BackendConfigLoader, *model.Mode
// Watch the configuration directory
// If the directory does not exist, we don't watch it
configHandler := newConfigFileHandler(options)
configHandler.Watch()
err = configHandler.Watch()
if err != nil {
log.Error().Err(err).Msg("error establishing configuration directory watcher")
}
log.Info().Msg("core/startup process completed!")
return cl, ml, options, nil

View File

@@ -7,6 +7,7 @@ import (
"strings"
"github.com/go-skynet/LocalAI/pkg/downloader"
"github.com/rs/zerolog/log"
"github.com/go-skynet/LocalAI/pkg/assets"
"gopkg.in/yaml.v3"
@@ -29,7 +30,10 @@ func ModelShortURL(s string) string {
}
func init() {
yaml.Unmarshal(modelLibrary, &modelShorteners)
err := yaml.Unmarshal(modelLibrary, &modelShorteners)
if err != nil {
log.Error().Err(err).Msg("error while unmarshalling embedded modelLibrary")
}
}
func GetRemoteLibraryShorteners(url string) (map[string]string, error) {
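
Since init() cannot return an error, the unmarshal failure can only be reported. A self-contained sketch of that pattern, with a made-up YAML literal standing in for the embedded model library and plain printing in place of zerolog:

package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

// Placeholder for the embedded modelLibrary file; the entry is invented
// purely for illustration.
var modelLibrary = []byte("llama3: \"hypothetical-short-url\"\n")

var modelShorteners map[string]string

func init() {
	// init() has no error return, so a bad embedded file is logged rather than ignored.
	if err := yaml.Unmarshal(modelLibrary, &modelShorteners); err != nil {
		fmt.Println("error while unmarshalling embedded modelLibrary:", err)
	}
}

func main() {
	fmt.Println(modelShorteners)
}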

View File

File diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
{
"name": "langchainjs-localai-example",
"version": "0.1.0",
"version": "0.1.1",
"description": "Trivial Example of using langchain + the OpenAI API + LocalAI together",
"main": "index.mjs",
"scripts": {
@@ -15,7 +15,11 @@
"typescript": "^5.0.4"
},
"dependencies": {
"langchain": "^0.0.67",
"typeorm": "^0.3.15"
"@langchain/community": "^0.0.52",
"@langchain/openai": "^0.0.28",
"langchain": "^0.1.36"
},
"overrides": {
"@langchain/core": "0.1.5"
}
}

View File

@@ -1,15 +1,17 @@
import { OpenAIChat } from "langchain/llms/openai";
import { loadQAStuffChain } from "langchain/chains";
import { Document } from "langchain/document";
import { initializeAgentExecutorWithOptions } from "langchain/agents";
import {Calculator} from "langchain/tools/calculator";
import { pull } from "langchain/hub";
import { AgentExecutor, createOpenAIToolsAgent } from "langchain/agents";
import {Calculator} from "@langchain/community/tools/calculator";
import { ChatOpenAI } from "@langchain/openai";
import type { ChatPromptTemplate } from "@langchain/core/prompts";
const pathToLocalAI = process.env['OPENAI_API_BASE'] || 'http://api:8080/v1';
const fakeApiKey = process.env['OPENAI_API_KEY'] || '-';
const modelName = process.env['MODEL_NAME'] || 'gpt-3.5-turbo';
function getModel(): OpenAIChat {
return new OpenAIChat({
function getModel(): ChatOpenAI {
return new ChatOpenAI({
prefixMessages: [
{
role: "system",
@@ -29,8 +31,8 @@ function getModel(): OpenAIChat {
// Minimal example.
export const run = async () => {
const model = getModel();
console.log(`about to model.call at ${new Date().toUTCString()}`);
const res = await model.call(
console.log(`about to model.invoke at ${new Date().toUTCString()}`);
const res = await model.invoke(
"What would be a good company name a company that makes colorful socks?"
);
console.log(`${new Date().toUTCString()}`);
@@ -47,7 +49,7 @@ export const run2 = async () => {
new Document({ pageContent: "Harrison went to Harvard." }),
new Document({ pageContent: "Ankush went to Princeton." }),
];
const resA = await chainA.call({
const resA = await chainA.invoke({
input_documents: docs,
question: "Where did Harrison go to college?",
});
@@ -58,22 +60,33 @@ await run2();
// Quickly thrown together example of using tools + agents.
// This seems like it should work, but it doesn't yet.
export const temporarilyBrokenToolTest = async () => {
export const toolAgentTest = async () => {
const model = getModel();
const executor = await initializeAgentExecutorWithOptions([new Calculator(true)], model, {
agentType: "zero-shot-react-description",
const prompt = await pull<ChatPromptTemplate>("hwchase17/openai-tools-agent");
const tools = [new Calculator()];
const agent = await createOpenAIToolsAgent({
llm: model,
tools: tools,
prompt: prompt
});
console.log("Loaded agent.");
const agentExecutor = new AgentExecutor({
agent,
tools,
});
const input = `What is the value of (500 *2) + 350 - 13?`;
console.log(`Executing with input "${input}"...`);
const result = await executor.call({ input });
const result = await agentExecutor.invoke({ input });
console.log(`Got output ${result.output}`);
}
await temporarilyBrokenToolTest();
await toolAgentTest();

View File

@@ -8,7 +8,8 @@
"esModuleInterop": true,
"allowSyntheticDefaultImports": true,
"isolatedModules": true,
"outDir": "./dist"
"outDir": "./dist",
"skipLibCheck": true
},
"include": ["src", "test"],
"exclude": ["node_modules", "dist"]

View File

@@ -239,7 +239,10 @@ func (app *App) updateUI() {
task := Task{Description: inputField.GetText()}
app.tasks = append(app.tasks, task)
app.state = StateRoot
postTasksToExternalService([]Task{task})
err := postTasksToExternalService([]Task{task})
if err != nil {
panic(err)
}
}
app.updateUI()
})

View File

@@ -1,3 +1,4 @@
---
name: "bert-embeddings"
config_file: |
@@ -6,6 +7,6 @@ config_file: |
backend: bert-embeddings
embeddings: true
files:
- filename: "bert-MiniLM-L6-v2q4_0.bin"
sha256: "a5a174d8772c8a569faf9f3136c441f2c3855b5bf35ed32274294219533feaad"
uri: "https://huggingface.co/mudler/all-MiniLM-L6-v2/resolve/main/ggml-model-q4_0.bin"
- filename: "bert-MiniLM-L6-v2q4_0.bin"
sha256: "a5a174d8772c8a569faf9f3136c441f2c3855b5bf35ed32274294219533feaad"
uri: "https://huggingface.co/mudler/all-MiniLM-L6-v2/resolve/main/ggml-model-q4_0.bin"

View File

@@ -1,3 +1,4 @@
---
config_file: |
backend: llama-cpp
context_size: 8192
@@ -15,5 +16,5 @@ config_file: |
stopwords:
- "[|Umano|]"
trimsuffix:
trimsuffix:
- "\n"

View File

@@ -1,7 +1,8 @@
---
name: "codellama"
config_file: |
backend: llama-cpp
context_size: 4096
f16: true
mmap: true
mmap: true

View File

@@ -1,6 +1,6 @@
---
name: "dreamshaper"
config_file: |
backend: diffusers
step: 25

View File

@@ -1,6 +1,6 @@
---
name: "hermes-2-pro-mistral"
config_file: |
mmap: true
template:
@@ -52,4 +52,3 @@ config_file: |
- <dummy32000>
- "\n</tool_call>"
- "\n\n\n"

View File

File diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
---
name: "llama3-instruct"
config_file: |
mmap: true
template:

View File

@@ -1,6 +1,6 @@
---
name: "llava"
config_file: |
backend: llama-cpp
context_size: 4096

View File

@@ -1,3 +1,4 @@
---
config_file: |
mmap: true
backend: llama-cpp
@@ -50,4 +51,3 @@ config_file: |
- <dummy32000>
- "\n</tool_call>"
- "\n\n\n"

View File

@@ -1,2 +1,3 @@
---
config_file: |
backend: parler-tts

View File

@@ -1,6 +1,6 @@
---
name: "phi-2-chatml"
config_file: |
mmap: true
template:
@@ -16,4 +16,3 @@ config_file: |
f16: true
stopwords:
- <|im_end|>

View File

@@ -1,3 +1,4 @@
---
name: "phi-2-orange"
config_file: |

View File

@@ -1,3 +1,4 @@
---
name: "phi-3-chat"
config_file: |
@@ -15,4 +16,3 @@ config_file: |
f16: true
stopwords:
- <|end|>

View File

@@ -1,2 +1,3 @@
---
config_file: |
backend: piper

View File

@@ -1,2 +1,3 @@
---
config_file: |
backend: rerankers
backend: rerankers

View File

@@ -1,4 +1,5 @@
---
name: "sentencetransformers"
config_file: |
backend: sentencetransformers
backend: sentencetransformers

View File

@@ -1,3 +1,4 @@
---
name: "stablediffusion-cpp"
config_file: |
@@ -7,42 +8,42 @@ config_file: |
model: stablediffusion_assets
files:
- filename: "stablediffusion_assets/AutoencoderKL-256-256-fp16-opt.param"
sha256: "18ca4b66685e21406bcf64c484b3b680b4949900415536d599cc876579c85c82"
uri: "https://raw.githubusercontent.com/EdVince/Stable-Diffusion-NCNN/main/x86/linux/assets/AutoencoderKL-256-256-fp16-opt.param"
- filename: "stablediffusion_assets/AutoencoderKL-512-512-fp16-opt.param"
sha256: "cf45f63aacf3dbbab0f59ed92a6f2c14d9a1801314631cd3abe91e3c85639a20"
uri: "https://raw.githubusercontent.com/EdVince/Stable-Diffusion-NCNN/main/x86/linux/assets/AutoencoderKL-512-512-fp16-opt.param"
- filename: "stablediffusion_assets/AutoencoderKL-base-fp16.param"
sha256: "0254a056dce61b0c27dc9ec1b78b53bcf55315c540f55f051eb841aa992701ba"
uri: "https://raw.githubusercontent.com/EdVince/Stable-Diffusion-NCNN/main/x86/linux/assets/AutoencoderKL-base-fp16.param"
- filename: "stablediffusion_assets/AutoencoderKL-encoder-512-512-fp16.bin"
sha256: "ddcb79a9951b9f91e05e087739ed69da2c1c4ae30ba4168cce350b49d617c9fa"
uri: "https://github.com/EdVince/Stable-Diffusion-NCNN/releases/download/naifu/AutoencoderKL-encoder-512-512-fp16.bin"
- filename: "stablediffusion_assets/AutoencoderKL-fp16.bin"
sha256: "f02e71f80e70252734724bbfaed5c4ddd3a8ed7e61bb2175ff5f53099f0e35dd"
uri: "https://github.com/EdVince/Stable-Diffusion-NCNN/releases/download/naifu/AutoencoderKL-fp16.bin"
- filename: "stablediffusion_assets/FrozenCLIPEmbedder-fp16.bin"
sha256: "1c9a12f4e1dd1b295a388045f7f28a2352a4d70c3dc96a542189a3dd7051fdd6"
uri: "https://github.com/EdVince/Stable-Diffusion-NCNN/releases/download/naifu/FrozenCLIPEmbedder-fp16.bin"
- filename: "stablediffusion_assets/FrozenCLIPEmbedder-fp16.param"
sha256: "471afbe678dd1fd3fe764ef9c6eccaccb0a7d7e601f27b462aa926b20eb368c9"
uri: "https://raw.githubusercontent.com/EdVince/Stable-Diffusion-NCNN/main/x86/linux/assets/FrozenCLIPEmbedder-fp16.param"
- filename: "stablediffusion_assets/log_sigmas.bin"
sha256: "a2089f8aa4c61f9c200feaec541ab3f5c94233b28deb6d5e8bcd974fa79b68ac"
uri: "https://github.com/EdVince/Stable-Diffusion-NCNN/raw/main/x86/linux/assets/log_sigmas.bin"
- filename: "stablediffusion_assets/UNetModel-256-256-MHA-fp16-opt.param"
sha256: "a58c380229f09491776df837b7aa7adffc0a87821dc4708b34535da2e36e3da1"
uri: "https://raw.githubusercontent.com/EdVince/Stable-Diffusion-NCNN/main/x86/linux/assets/UNetModel-256-256-MHA-fp16-opt.param"
- filename: "stablediffusion_assets/UNetModel-512-512-MHA-fp16-opt.param"
sha256: "f12034067062827bd7f43d1d21888d1f03905401acf6c6eea22be23c259636fa"
uri: "https://raw.githubusercontent.com/EdVince/Stable-Diffusion-NCNN/main/x86/linux/assets/UNetModel-512-512-MHA-fp16-opt.param"
- filename: "stablediffusion_assets/UNetModel-base-MHA-fp16.param"
sha256: "696f6975de49f4325b53ce32aff81861a6d6c07cd9ce3f0aae2cc405350af38d"
uri: "https://raw.githubusercontent.com/EdVince/Stable-Diffusion-NCNN/main/x86/linux/assets/UNetModel-base-MHA-fp16.param"
- filename: "stablediffusion_assets/UNetModel-MHA-fp16.bin"
sha256: "d618918d011bfc1f644c0f2a33bf84931bd53b28a98492b0a8ed6f3a818852c3"
uri: "https://github.com/EdVince/Stable-Diffusion-NCNN/releases/download/naifu/UNetModel-MHA-fp16.bin"
- filename: "stablediffusion_assets/vocab.txt"
sha256: "e30e57b6f1e47616982ef898d8922be24e535b4fa3d0110477b3a6f02ebbae7d"
uri: "https://raw.githubusercontent.com/EdVince/Stable-Diffusion-NCNN/main/x86/linux/assets/vocab.txt"
- filename: "stablediffusion_assets/AutoencoderKL-256-256-fp16-opt.param"
sha256: "18ca4b66685e21406bcf64c484b3b680b4949900415536d599cc876579c85c82"
uri: "https://raw.githubusercontent.com/EdVince/Stable-Diffusion-NCNN/main/x86/linux/assets/AutoencoderKL-256-256-fp16-opt.param"
- filename: "stablediffusion_assets/AutoencoderKL-512-512-fp16-opt.param"
sha256: "cf45f63aacf3dbbab0f59ed92a6f2c14d9a1801314631cd3abe91e3c85639a20"
uri: "https://raw.githubusercontent.com/EdVince/Stable-Diffusion-NCNN/main/x86/linux/assets/AutoencoderKL-512-512-fp16-opt.param"
- filename: "stablediffusion_assets/AutoencoderKL-base-fp16.param"
sha256: "0254a056dce61b0c27dc9ec1b78b53bcf55315c540f55f051eb841aa992701ba"
uri: "https://raw.githubusercontent.com/EdVince/Stable-Diffusion-NCNN/main/x86/linux/assets/AutoencoderKL-base-fp16.param"
- filename: "stablediffusion_assets/AutoencoderKL-encoder-512-512-fp16.bin"
sha256: "ddcb79a9951b9f91e05e087739ed69da2c1c4ae30ba4168cce350b49d617c9fa"
uri: "https://github.com/EdVince/Stable-Diffusion-NCNN/releases/download/naifu/AutoencoderKL-encoder-512-512-fp16.bin"
- filename: "stablediffusion_assets/AutoencoderKL-fp16.bin"
sha256: "f02e71f80e70252734724bbfaed5c4ddd3a8ed7e61bb2175ff5f53099f0e35dd"
uri: "https://github.com/EdVince/Stable-Diffusion-NCNN/releases/download/naifu/AutoencoderKL-fp16.bin"
- filename: "stablediffusion_assets/FrozenCLIPEmbedder-fp16.bin"
sha256: "1c9a12f4e1dd1b295a388045f7f28a2352a4d70c3dc96a542189a3dd7051fdd6"
uri: "https://github.com/EdVince/Stable-Diffusion-NCNN/releases/download/naifu/FrozenCLIPEmbedder-fp16.bin"
- filename: "stablediffusion_assets/FrozenCLIPEmbedder-fp16.param"
sha256: "471afbe678dd1fd3fe764ef9c6eccaccb0a7d7e601f27b462aa926b20eb368c9"
uri: "https://raw.githubusercontent.com/EdVince/Stable-Diffusion-NCNN/main/x86/linux/assets/FrozenCLIPEmbedder-fp16.param"
- filename: "stablediffusion_assets/log_sigmas.bin"
sha256: "a2089f8aa4c61f9c200feaec541ab3f5c94233b28deb6d5e8bcd974fa79b68ac"
uri: "https://github.com/EdVince/Stable-Diffusion-NCNN/raw/main/x86/linux/assets/log_sigmas.bin"
- filename: "stablediffusion_assets/UNetModel-256-256-MHA-fp16-opt.param"
sha256: "a58c380229f09491776df837b7aa7adffc0a87821dc4708b34535da2e36e3da1"
uri: "https://raw.githubusercontent.com/EdVince/Stable-Diffusion-NCNN/main/x86/linux/assets/UNetModel-256-256-MHA-fp16-opt.param"
- filename: "stablediffusion_assets/UNetModel-512-512-MHA-fp16-opt.param"
sha256: "f12034067062827bd7f43d1d21888d1f03905401acf6c6eea22be23c259636fa"
uri: "https://raw.githubusercontent.com/EdVince/Stable-Diffusion-NCNN/main/x86/linux/assets/UNetModel-512-512-MHA-fp16-opt.param"
- filename: "stablediffusion_assets/UNetModel-base-MHA-fp16.param"
sha256: "696f6975de49f4325b53ce32aff81861a6d6c07cd9ce3f0aae2cc405350af38d"
uri: "https://raw.githubusercontent.com/EdVince/Stable-Diffusion-NCNN/main/x86/linux/assets/UNetModel-base-MHA-fp16.param"
- filename: "stablediffusion_assets/UNetModel-MHA-fp16.bin"
sha256: "d618918d011bfc1f644c0f2a33bf84931bd53b28a98492b0a8ed6f3a818852c3"
uri: "https://github.com/EdVince/Stable-Diffusion-NCNN/releases/download/naifu/UNetModel-MHA-fp16.bin"
- filename: "stablediffusion_assets/vocab.txt"
sha256: "e30e57b6f1e47616982ef898d8922be24e535b4fa3d0110477b3a6f02ebbae7d"
uri: "https://raw.githubusercontent.com/EdVince/Stable-Diffusion-NCNN/main/x86/linux/assets/vocab.txt"

View File

@@ -1,3 +1,4 @@
---
name: "tinydream"
config_file: |
@@ -33,4 +34,4 @@ files:
uri: "https://github.com/M0Rf30/tiny-dream-bins/releases/download/1.0/UNetModel-fp16.param"
- filename: "tinydream_assets/vocab.txt"
sha256: "e30e57b6f1e47616982ef898d8922be24e535b4fa3d0110477b3a6f02ebbae7d"
uri: "https://github.com/M0Rf30/tiny-dream-bins/releases/download/1.0/vocab.txt"
uri: "https://github.com/M0Rf30/tiny-dream-bins/releases/download/1.0/vocab.txt"

View File

@@ -1,3 +1,4 @@
---
name: "vicuna-chat"
description: |
@@ -18,4 +19,4 @@ config_file: |
Complete the following sentence: {{.Input}}
chat: |
{{.Input}}
ASSISTANT:
ASSISTANT:

View File

@@ -1,3 +1,4 @@
---
name: "virtual"
description: |

View File

@@ -1,12 +1,12 @@
---
name: "whisper-base"
config_file: |
backend: whisper
parameters:
model: ggml-whisper-base.bin
files:
- filename: "ggml-whisper-base.bin"
sha256: "60ed5bc3dd14eea856493d334349b405782ddcaf0028d4b5df4088345fba2efe"
uri: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base.bin"
- filename: "ggml-whisper-base.bin"
sha256: "60ed5bc3dd14eea856493d334349b405782ddcaf0028d4b5df4088345fba2efe"
uri: "https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base.bin"

go.mod (10 changed lines)
View File

@@ -93,7 +93,7 @@ require (
github.com/golang/protobuf v1.5.3 // indirect
github.com/golang/snappy v0.0.2 // indirect
github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect
github.com/gorilla/css v1.0.0 // indirect
github.com/gorilla/css v1.0.1 // indirect
github.com/huandu/xstrings v1.3.3 // indirect
github.com/josharian/intern v1.0.0 // indirect
github.com/klauspost/pgzip v1.2.5 // indirect
@@ -133,9 +133,9 @@ require (
github.com/yuin/goldmark-emoji v1.0.2 // indirect
go.opentelemetry.io/otel/sdk v1.19.0 // indirect
go.opentelemetry.io/otel/trace v1.19.0 // indirect
golang.org/x/crypto v0.21.0 // indirect
golang.org/x/crypto v0.22.0 // indirect
golang.org/x/mod v0.16.0 // indirect
golang.org/x/term v0.18.0 // indirect
golang.org/x/term v0.19.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20230822172742-b8732ec3820d // indirect
gopkg.in/fsnotify.v1 v1.4.7 // indirect
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 // indirect
@@ -161,8 +161,8 @@ require (
github.com/rivo/uniseg v0.2.0 // indirect
github.com/valyala/bytebufferpool v1.0.0 // indirect
github.com/valyala/tcplisten v1.0.0 // indirect
golang.org/x/net v0.22.0 // indirect
golang.org/x/sys v0.18.0 // indirect
golang.org/x/net v0.24.0 // indirect
golang.org/x/sys v0.19.0 // indirect
golang.org/x/text v0.14.0 // indirect
golang.org/x/tools v0.19.0 // indirect
)

go.sum (10 changed lines)
View File

@@ -146,6 +146,8 @@ github.com/google/uuid v1.5.0 h1:1p67kYwdtXjb0gL0BPiP1Av9wiZPo5A8z2cWkTZ+eyU=
github.com/google/uuid v1.5.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/gorilla/css v1.0.0 h1:BQqNyPTi50JCFMTw/b67hByjMVXZRwGha6wxVGkeihY=
github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c=
github.com/gorilla/css v1.0.1 h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8=
github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A3b0=
github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA=
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
@@ -377,6 +379,8 @@ golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5y
golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4=
golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA=
golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs=
golang.org/x/crypto v0.22.0 h1:g1v0xeRhjcugydODzvb3mEM9SQ0HGp9s/nh3COQ/C30=
golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M=
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
@@ -395,6 +399,8 @@ golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug
golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY=
golang.org/x/net v0.22.0 h1:9sGLhx7iRIHEiX0oAJ3MRZMUCElJgy7Br1nO+AMN3Tc=
golang.org/x/net v0.22.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg=
golang.org/x/net v0.24.0 h1:1PcaxkF854Fu3+lvBIx5SYn9wRlBzzcnHZSiaFFAb0w=
golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -431,12 +437,16 @@ golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4=
golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o=
golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc=
golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U=
golang.org/x/term v0.18.0 h1:FcHjZXDMxI8mM3nwhX9HlKop4C0YQvCVCdwYl2wOtE8=
golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58=
golang.org/x/term v0.19.0 h1:+ThwsDv+tYfnJFhF4L8jITxu1tdTWRTZpdsWgEgjL6Q=
golang.org/x/term v0.19.0/go.mod h1:2CuTdWZ7KHSQwUzKva0cbMg6q2DMI3Mmxp+gKJbskEk=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=

View File

@@ -43,7 +43,11 @@ func main() {
for _, envFile := range envFiles {
if _, err := os.Stat(envFile); err == nil {
log.Info().Str("envFile", envFile).Msg("loading environment variables from file")
godotenv.Load(envFile)
err = godotenv.Load(envFile)
if err != nil {
log.Error().Err(err).Str("envFile", envFile).Msg("failed to load environment variables from file")
continue
}
}
}
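
The same loop as the diff above, shown standalone; the candidate filenames are hypothetical and only files that exist are attempted.

package main

import (
	"os"

	"github.com/joho/godotenv"
	"github.com/rs/zerolog/log"
)

func main() {
	envFiles := []string{".env", "localai.env"} // hypothetical candidates
	for _, envFile := range envFiles {
		if _, err := os.Stat(envFile); err != nil {
			continue // skip files that do not exist
		}
		log.Info().Str("envFile", envFile).Msg("loading environment variables from file")
		// Checking the returned error is the actual change: a malformed file
		// is reported and skipped rather than silently ignored.
		if err := godotenv.Load(envFile); err != nil {
			log.Error().Err(err).Str("envFile", envFile).Msg("failed to load environment variables from file")
		}
	}
}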

View File

@@ -10,7 +10,7 @@ import (
func ExtractFiles(content embed.FS, extractDir string) error {
// Create the target directory if it doesn't exist
err := os.MkdirAll(extractDir, 0755)
err := os.MkdirAll(extractDir, 0750)
if err != nil {
return fmt.Errorf("failed to create directory: %v", err)
}
@@ -25,7 +25,7 @@ func ExtractFiles(content embed.FS, extractDir string) error {
targetFile := filepath.Join(extractDir, path)
if d.IsDir() {
// Create the directory in the target directory
err := os.MkdirAll(targetFile, 0755)
err := os.MkdirAll(targetFile, 0750)
if err != nil {
return fmt.Errorf("failed to create directory: %v", err)
}
@@ -39,7 +39,7 @@ func ExtractFiles(content embed.FS, extractDir string) error {
}
// Create the file in the target directory
err = os.WriteFile(targetFile, fileData, 0644)
err = os.WriteFile(targetFile, fileData, 0600)
if err != nil {
return fmt.Errorf("failed to write file: %v", err)
}

View File

@@ -3,10 +3,12 @@ package assets
import (
"embed"
"io/fs"
"github.com/rs/zerolog/log"
)
func ListFiles(content embed.FS) (files []string) {
fs.WalkDir(content, ".", func(path string, d fs.DirEntry, err error) error {
err := fs.WalkDir(content, ".", func(path string, d fs.DirEntry, err error) error {
if err != nil {
return err
}
@@ -18,5 +20,8 @@ func ListFiles(content embed.FS) (files []string) {
files = append(files, path)
return nil
})
if err != nil {
log.Error().Err(err).Msg("error walking the embedded filesystem")
}
return
}
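
For completeness, a runnable version of the walker above with the error returned to the caller instead of only logged (a deliberate variation on the diff); the //go:embed directive assumes a local testdata directory exists.

package main

import (
	"embed"
	"fmt"
	"io/fs"
)

//go:embed testdata
var content embed.FS // assumes a ./testdata directory to embed

// listFiles walks the embedded filesystem and collects every regular file.
// Propagating the callback's err and checking WalkDir's result is the point of the fix.
func listFiles(content embed.FS) ([]string, error) {
	var files []string
	err := fs.WalkDir(content, ".", func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		if !d.IsDir() {
			files = append(files, path)
		}
		return nil
	})
	return files, err
}

func main() {
	files, err := listFiles(content)
	if err != nil {
		fmt.Println("error walking the embedded filesystem:", err)
		return
	}
	fmt.Println(files)
}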

View File

@@ -184,7 +184,7 @@ func DownloadFile(url string, filePath, sha string, fileN, total int, downloadSt
}
// Create parent directory
err = os.MkdirAll(filepath.Dir(filePath), 0755)
err = os.MkdirAll(filepath.Dir(filePath), 0750)
if err != nil {
return fmt.Errorf("failed to create parent directory for file %q: %v", filePath, err)
}

View File

@@ -92,7 +92,7 @@ func ReadConfigFile(filePath string) (*Config, error) {
func InstallModel(basePath, nameOverride string, config *Config, configOverrides map[string]interface{}, downloadStatus func(string, string, string, float64)) error {
// Create base path if it doesn't exist
err := os.MkdirAll(basePath, 0755)
err := os.MkdirAll(basePath, 0750)
if err != nil {
return fmt.Errorf("failed to create base path: %v", err)
}
@@ -125,12 +125,12 @@ func InstallModel(basePath, nameOverride string, config *Config, configOverrides
filePath := filepath.Join(basePath, template.Name+".tmpl")
// Create parent directory
err := os.MkdirAll(filepath.Dir(filePath), 0755)
err := os.MkdirAll(filepath.Dir(filePath), 0750)
if err != nil {
return fmt.Errorf("failed to create parent directory for prompt template %q: %v", template.Name, err)
}
// Create and write file content
err = os.WriteFile(filePath, []byte(template.Content), 0644)
err = os.WriteFile(filePath, []byte(template.Content), 0600)
if err != nil {
return fmt.Errorf("failed to write prompt template %q: %v", template.Name, err)
}
@@ -170,7 +170,7 @@ func InstallModel(basePath, nameOverride string, config *Config, configOverrides
return fmt.Errorf("failed to marshal updated config YAML: %v", err)
}
err = os.WriteFile(configFilePath, updatedConfigYAML, 0644)
err = os.WriteFile(configFilePath, updatedConfigYAML, 0600)
if err != nil {
return fmt.Errorf("failed to write updated config file: %v", err)
}

View File

@@ -48,7 +48,7 @@ var _ = Describe("Model test", func() {
}}
out, err := yaml.Marshal(gallery)
Expect(err).ToNot(HaveOccurred())
err = os.WriteFile(filepath.Join(tempdir, "gallery_simple.yaml"), out, 0644)
err = os.WriteFile(filepath.Join(tempdir, "gallery_simple.yaml"), out, 0600)
Expect(err).ToNot(HaveOccurred())
galleries := []Gallery{

View File

@@ -131,10 +131,10 @@ func (s *server) PredictStream(in *pb.PredictOptions, stream pb.Backend_PredictS
done <- true
}()
s.llm.PredictStream(in, resultChan)
err := s.llm.PredictStream(in, resultChan)
<-done
return nil
return err
}
func (s *server) TokenizeString(ctx context.Context, in *pb.PredictOptions) (*pb.TokenizationResponse, error) {

View File

@@ -65,7 +65,7 @@ func (ml *ModelLoader) GetGRPCPID(id string) (int, error) {
func (ml *ModelLoader) startProcess(grpcProcess, id string, serverAddress string) error {
// Make sure the process is executable
if err := os.Chmod(grpcProcess, 0755); err != nil {
if err := os.Chmod(grpcProcess, 0700); err != nil {
return err
}

View File

@@ -21,9 +21,9 @@ var _ = Describe("TemplateCache", func() {
Expect(err).NotTo(HaveOccurred())
// Writing example template files
err = os.WriteFile(filepath.Join(tempDir, "example.tmpl"), []byte("Hello, {{.Name}}!"), 0644)
err = os.WriteFile(filepath.Join(tempDir, "example.tmpl"), []byte("Hello, {{.Name}}!"), 0600)
Expect(err).NotTo(HaveOccurred())
err = os.WriteFile(filepath.Join(tempDir, "empty.tmpl"), []byte(""), 0644)
err = os.WriteFile(filepath.Join(tempDir, "empty.tmpl"), []byte(""), 0600)
Expect(err).NotTo(HaveOccurred())
templateCache = templates.NewTemplateCache(tempDir)

View File

@@ -15,7 +15,7 @@ func SaveConfig(filePath, fileName string, obj any) {
}
absolutePath := filepath.Join(filePath, fileName)
err = os.WriteFile(absolutePath, file, 0644)
err = os.WriteFile(absolutePath, file, 0600)
if err != nil {
log.Error().Err(err).Str("filepath", absolutePath).Msg("failed to save configuration file")
}

View File

@@ -36,7 +36,7 @@ var _ = Describe("Integration tests for the stores backend(s) and internal APIs"
tmpdir, err = os.MkdirTemp("", "")
Expect(err).ToNot(HaveOccurred())
backendAssetsDir := filepath.Join(tmpdir, "backend-assets")
err = os.Mkdir(backendAssetsDir, 0755)
err = os.Mkdir(backendAssetsDir, 0750)
Expect(err).ToNot(HaveOccurred())
err = assets.ExtractFiles(backendAssets, backendAssetsDir)