mirror of
https://github.com/mudler/LocalAI.git
synced 2026-04-08 16:58:54 -04:00
feat(backends): add system backend, refactor (#6059)
- Add a system backend path - Refactor and consolidate system information in system state - Use system state in all the components to figure out the system paths to use whenever needed - Refactor BackendConfig -> ModelConfig. This was otherwise misleading, as we now have a backend configuration which is not the model config. Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
This commit is contained in:
committed by
GitHub
parent
253b7537dc
commit
089efe05fd
@@ -6,6 +6,7 @@ import (
|
||||
|
||||
cliContext "github.com/mudler/LocalAI/core/cli/context"
|
||||
"github.com/mudler/LocalAI/core/config"
|
||||
"github.com/mudler/LocalAI/pkg/system"
|
||||
|
||||
"github.com/mudler/LocalAI/core/gallery"
|
||||
"github.com/mudler/LocalAI/core/startup"
|
||||
@@ -14,8 +15,9 @@ import (
|
||||
)
|
||||
|
||||
type BackendsCMDFlags struct {
|
||||
BackendGalleries string `env:"LOCALAI_BACKEND_GALLERIES,BACKEND_GALLERIES" help:"JSON list of backend galleries" group:"backends" default:"${backends}"`
|
||||
BackendsPath string `env:"LOCALAI_BACKENDS_PATH,BACKENDS_PATH" type:"path" default:"${basepath}/backends" help:"Path containing backends used for inferencing" group:"storage"`
|
||||
BackendGalleries string `env:"LOCALAI_BACKEND_GALLERIES,BACKEND_GALLERIES" help:"JSON list of backend galleries" group:"backends" default:"${backends}"`
|
||||
BackendsPath string `env:"LOCALAI_BACKENDS_PATH,BACKENDS_PATH" type:"path" default:"${basepath}/backends" help:"Path containing backends used for inferencing" group:"storage"`
|
||||
BackendsSystemPath string `env:"LOCALAI_BACKENDS_SYSTEM_PATH,BACKEND_SYSTEM_PATH" type:"path" default:"/usr/share/localai/backends" help:"Path containing system backends used for inferencing" group:"backends"`
|
||||
}
|
||||
|
||||
type BackendsList struct {
|
||||
@@ -48,7 +50,15 @@ func (bl *BackendsList) Run(ctx *cliContext.Context) error {
|
||||
log.Error().Err(err).Msg("unable to load galleries")
|
||||
}
|
||||
|
||||
backends, err := gallery.AvailableBackends(galleries, bl.BackendsPath)
|
||||
systemState, err := system.GetSystemState(
|
||||
system.WithBackendSystemPath(bl.BackendsSystemPath),
|
||||
system.WithBackendPath(bl.BackendsPath),
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
backends, err := gallery.AvailableBackends(galleries, systemState)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -68,6 +78,14 @@ func (bi *BackendsInstall) Run(ctx *cliContext.Context) error {
|
||||
log.Error().Err(err).Msg("unable to load galleries")
|
||||
}
|
||||
|
||||
systemState, err := system.GetSystemState(
|
||||
system.WithBackendSystemPath(bi.BackendsSystemPath),
|
||||
system.WithBackendPath(bi.BackendsPath),
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
progressBar := progressbar.NewOptions(
|
||||
1000,
|
||||
progressbar.OptionSetDescription(fmt.Sprintf("downloading backend %s", bi.BackendArgs)),
|
||||
@@ -82,7 +100,7 @@ func (bi *BackendsInstall) Run(ctx *cliContext.Context) error {
|
||||
}
|
||||
}
|
||||
|
||||
err := startup.InstallExternalBackends(galleries, bi.BackendsPath, progressCallback, bi.BackendArgs, bi.Name, bi.Alias)
|
||||
err = startup.InstallExternalBackends(galleries, systemState, progressCallback, bi.BackendArgs, bi.Name, bi.Alias)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -94,7 +112,15 @@ func (bu *BackendsUninstall) Run(ctx *cliContext.Context) error {
|
||||
for _, backendName := range bu.BackendArgs {
|
||||
log.Info().Str("backend", backendName).Msg("uninstalling backend")
|
||||
|
||||
err := gallery.DeleteBackendFromSystem(bu.BackendsPath, backendName)
|
||||
systemState, err := system.GetSystemState(
|
||||
system.WithBackendSystemPath(bu.BackendsSystemPath),
|
||||
system.WithBackendPath(bu.BackendsPath),
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = gallery.DeleteBackendFromSystem(systemState, backendName)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -11,6 +11,7 @@ import (
|
||||
"github.com/mudler/LocalAI/core/gallery"
|
||||
"github.com/mudler/LocalAI/core/startup"
|
||||
"github.com/mudler/LocalAI/pkg/downloader"
|
||||
"github.com/mudler/LocalAI/pkg/system"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/schollz/progressbar/v3"
|
||||
)
|
||||
@@ -45,7 +46,14 @@ func (ml *ModelsList) Run(ctx *cliContext.Context) error {
|
||||
log.Error().Err(err).Msg("unable to load galleries")
|
||||
}
|
||||
|
||||
models, err := gallery.AvailableGalleryModels(galleries, ml.ModelsPath)
|
||||
systemState, err := system.GetSystemState(
|
||||
system.WithModelPath(ml.ModelsPath),
|
||||
system.WithBackendPath(ml.BackendsPath),
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
models, err := gallery.AvailableGalleryModels(galleries, systemState)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -60,6 +68,15 @@ func (ml *ModelsList) Run(ctx *cliContext.Context) error {
|
||||
}
|
||||
|
||||
func (mi *ModelsInstall) Run(ctx *cliContext.Context) error {
|
||||
|
||||
systemState, err := system.GetSystemState(
|
||||
system.WithModelPath(mi.ModelsPath),
|
||||
system.WithBackendPath(mi.BackendsPath),
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var galleries []config.Gallery
|
||||
if err := json.Unmarshal([]byte(mi.Galleries), &galleries); err != nil {
|
||||
log.Error().Err(err).Msg("unable to load galleries")
|
||||
@@ -86,7 +103,7 @@ func (mi *ModelsInstall) Run(ctx *cliContext.Context) error {
|
||||
}
|
||||
}
|
||||
//startup.InstallModels()
|
||||
models, err := gallery.AvailableGalleryModels(galleries, mi.ModelsPath)
|
||||
models, err := gallery.AvailableGalleryModels(galleries, systemState)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -94,7 +111,7 @@ func (mi *ModelsInstall) Run(ctx *cliContext.Context) error {
|
||||
modelURI := downloader.URI(modelName)
|
||||
|
||||
if !modelURI.LooksLikeOCI() {
|
||||
model := gallery.FindGalleryElement(models, modelName, mi.ModelsPath)
|
||||
model := gallery.FindGalleryElement(models, modelName)
|
||||
if model == nil {
|
||||
log.Error().Str("model", modelName).Msg("model not found")
|
||||
return err
|
||||
@@ -108,7 +125,7 @@ func (mi *ModelsInstall) Run(ctx *cliContext.Context) error {
|
||||
log.Info().Str("model", modelName).Str("license", model.License).Msg("installing model")
|
||||
}
|
||||
|
||||
err = startup.InstallModels(galleries, backendGalleries, mi.ModelsPath, mi.BackendsPath, !mi.DisablePredownloadScan, mi.AutoloadBackendGalleries, progressCallback, modelName)
|
||||
err = startup.InstallModels(galleries, backendGalleries, systemState, !mi.DisablePredownloadScan, mi.AutoloadBackendGalleries, progressCallback, modelName)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -13,6 +13,7 @@ import (
|
||||
"github.com/mudler/LocalAI/core/config"
|
||||
"github.com/mudler/LocalAI/core/http"
|
||||
"github.com/mudler/LocalAI/core/p2p"
|
||||
"github.com/mudler/LocalAI/pkg/system"
|
||||
"github.com/rs/zerolog"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
@@ -22,6 +23,7 @@ type RunCMD struct {
|
||||
|
||||
ExternalBackends []string `env:"LOCALAI_EXTERNAL_BACKENDS,EXTERNAL_BACKENDS" help:"A list of external backends to load from gallery on boot" group:"backends"`
|
||||
BackendsPath string `env:"LOCALAI_BACKENDS_PATH,BACKENDS_PATH" type:"path" default:"${basepath}/backends" help:"Path containing backends used for inferencing" group:"backends"`
|
||||
BackendsSystemPath string `env:"LOCALAI_BACKENDS_SYSTEM_PATH,BACKEND_SYSTEM_PATH" type:"path" default:"/usr/share/localai/backends" help:"Path containing system backends used for inferencing" group:"backends"`
|
||||
ModelsPath string `env:"LOCALAI_MODELS_PATH,MODELS_PATH" type:"path" default:"${basepath}/models" help:"Path containing models used for inferencing" group:"storage"`
|
||||
GeneratedContentPath string `env:"LOCALAI_GENERATED_CONTENT_PATH,GENERATED_CONTENT_PATH" type:"path" default:"/tmp/generated/content" help:"Location for generated content (e.g. images, audio, videos)" group:"storage"`
|
||||
UploadPath string `env:"LOCALAI_UPLOAD_PATH,UPLOAD_PATH" type:"path" default:"/tmp/localai/upload" help:"Path to store uploads from files api" group:"storage"`
|
||||
@@ -77,12 +79,20 @@ func (r *RunCMD) Run(ctx *cliContext.Context) error {
|
||||
os.MkdirAll(r.BackendsPath, 0750)
|
||||
os.MkdirAll(r.ModelsPath, 0750)
|
||||
|
||||
systemState, err := system.GetSystemState(
|
||||
system.WithBackendSystemPath(r.BackendsSystemPath),
|
||||
system.WithModelPath(r.ModelsPath),
|
||||
system.WithBackendPath(r.BackendsPath),
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
opts := []config.AppOption{
|
||||
config.WithConfigFile(r.ModelsConfigFile),
|
||||
config.WithJSONStringPreload(r.PreloadModels),
|
||||
config.WithYAMLConfigPreload(r.PreloadModelsConfig),
|
||||
config.WithModelPath(r.ModelsPath),
|
||||
config.WithBackendsPath(r.BackendsPath),
|
||||
config.WithSystemState(systemState),
|
||||
config.WithContextSize(r.ContextSize),
|
||||
config.WithDebug(zerolog.GlobalLevel() <= zerolog.DebugLevel),
|
||||
config.WithGeneratedContentDir(r.GeneratedContentPath),
|
||||
|
||||
@@ -12,6 +12,7 @@ import (
|
||||
cliContext "github.com/mudler/LocalAI/core/cli/context"
|
||||
"github.com/mudler/LocalAI/core/config"
|
||||
"github.com/mudler/LocalAI/pkg/model"
|
||||
"github.com/mudler/LocalAI/pkg/system"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
@@ -56,6 +57,13 @@ func (t *SoundGenerationCMD) Run(ctx *cliContext.Context) error {
|
||||
}
|
||||
text := strings.Join(t.Text, " ")
|
||||
|
||||
systemState, err := system.GetSystemState(
|
||||
system.WithModelPath(t.ModelsPath),
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
externalBackends := make(map[string]string)
|
||||
// split ":" to get backend name and the uri
|
||||
for _, v := range t.ExternalGRPCBackends {
|
||||
@@ -66,12 +74,12 @@ func (t *SoundGenerationCMD) Run(ctx *cliContext.Context) error {
|
||||
}
|
||||
|
||||
opts := &config.ApplicationConfig{
|
||||
ModelPath: t.ModelsPath,
|
||||
SystemState: systemState,
|
||||
Context: context.Background(),
|
||||
GeneratedContentDir: outputDir,
|
||||
ExternalGRPCBackends: externalBackends,
|
||||
}
|
||||
ml := model.NewModelLoader(opts.ModelPath, opts.SingleBackend)
|
||||
ml := model.NewModelLoader(systemState, opts.SingleBackend)
|
||||
|
||||
defer func() {
|
||||
err := ml.StopAllGRPC()
|
||||
@@ -80,7 +88,7 @@ func (t *SoundGenerationCMD) Run(ctx *cliContext.Context) error {
|
||||
}
|
||||
}()
|
||||
|
||||
options := config.BackendConfig{}
|
||||
options := config.ModelConfig{}
|
||||
options.SetDefaults()
|
||||
options.Backend = t.Backend
|
||||
options.Model = t.Model
|
||||
|
||||
@@ -9,6 +9,7 @@ import (
|
||||
cliContext "github.com/mudler/LocalAI/core/cli/context"
|
||||
"github.com/mudler/LocalAI/core/config"
|
||||
"github.com/mudler/LocalAI/pkg/model"
|
||||
"github.com/mudler/LocalAI/pkg/system"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
@@ -24,18 +25,24 @@ type TranscriptCMD struct {
|
||||
}
|
||||
|
||||
func (t *TranscriptCMD) Run(ctx *cliContext.Context) error {
|
||||
systemState, err := system.GetSystemState(
|
||||
system.WithModelPath(t.ModelsPath),
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
opts := &config.ApplicationConfig{
|
||||
ModelPath: t.ModelsPath,
|
||||
Context: context.Background(),
|
||||
SystemState: systemState,
|
||||
Context: context.Background(),
|
||||
}
|
||||
|
||||
cl := config.NewBackendConfigLoader(t.ModelsPath)
|
||||
ml := model.NewModelLoader(opts.ModelPath, opts.SingleBackend)
|
||||
if err := cl.LoadBackendConfigsFromPath(t.ModelsPath); err != nil {
|
||||
cl := config.NewModelConfigLoader(t.ModelsPath)
|
||||
ml := model.NewModelLoader(systemState, opts.SingleBackend)
|
||||
if err := cl.LoadModelConfigsFromPath(t.ModelsPath); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
c, exists := cl.GetBackendConfig(t.Model)
|
||||
c, exists := cl.GetModelConfig(t.Model)
|
||||
if !exists {
|
||||
return errors.New("model not found")
|
||||
}
|
||||
|
||||
@@ -11,6 +11,7 @@ import (
|
||||
cliContext "github.com/mudler/LocalAI/core/cli/context"
|
||||
"github.com/mudler/LocalAI/core/config"
|
||||
"github.com/mudler/LocalAI/pkg/model"
|
||||
"github.com/mudler/LocalAI/pkg/system"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
@@ -34,12 +35,20 @@ func (t *TTSCMD) Run(ctx *cliContext.Context) error {
|
||||
|
||||
text := strings.Join(t.Text, " ")
|
||||
|
||||
systemState, err := system.GetSystemState(
|
||||
system.WithModelPath(t.ModelsPath),
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
opts := &config.ApplicationConfig{
|
||||
ModelPath: t.ModelsPath,
|
||||
SystemState: systemState,
|
||||
Context: context.Background(),
|
||||
GeneratedContentDir: outputDir,
|
||||
}
|
||||
ml := model.NewModelLoader(opts.ModelPath, opts.SingleBackend)
|
||||
|
||||
ml := model.NewModelLoader(systemState, opts.SingleBackend)
|
||||
|
||||
defer func() {
|
||||
err := ml.StopAllGRPC()
|
||||
@@ -48,7 +57,7 @@ func (t *TTSCMD) Run(ctx *cliContext.Context) error {
|
||||
}
|
||||
}()
|
||||
|
||||
options := config.BackendConfig{}
|
||||
options := config.ModelConfig{}
|
||||
options.SetDefaults()
|
||||
options.Backend = t.Backend
|
||||
options.Model = t.Model
|
||||
|
||||
@@ -17,6 +17,7 @@ import (
|
||||
"github.com/mudler/LocalAI/core/gallery"
|
||||
"github.com/mudler/LocalAI/pkg/downloader"
|
||||
"github.com/mudler/LocalAI/pkg/oci"
|
||||
"github.com/mudler/LocalAI/pkg/system"
|
||||
)
|
||||
|
||||
type UtilCMD struct {
|
||||
@@ -108,6 +109,14 @@ func (u *GGUFInfoCMD) Run(ctx *cliContext.Context) error {
|
||||
}
|
||||
|
||||
func (hfscmd *HFScanCMD) Run(ctx *cliContext.Context) error {
|
||||
|
||||
systemState, err := system.GetSystemState(
|
||||
system.WithModelPath(hfscmd.ModelsPath),
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
log.Info().Msg("LocalAI Security Scanner - This is BEST EFFORT functionality! Currently limited to huggingface models!")
|
||||
if len(hfscmd.ToScan) == 0 {
|
||||
log.Info().Msg("Checking all installed models against galleries")
|
||||
@@ -116,7 +125,7 @@ func (hfscmd *HFScanCMD) Run(ctx *cliContext.Context) error {
|
||||
log.Error().Err(err).Msg("unable to load galleries")
|
||||
}
|
||||
|
||||
err := gallery.SafetyScanGalleryModels(galleries, hfscmd.ModelsPath)
|
||||
err := gallery.SafetyScanGalleryModels(galleries, systemState)
|
||||
if err == nil {
|
||||
log.Info().Msg("No security warnings were detected for your installed models. Please note that this is a BEST EFFORT tool, and all issues may not be detected.")
|
||||
} else {
|
||||
@@ -150,17 +159,17 @@ func (uhcmd *UsecaseHeuristicCMD) Run(ctx *cliContext.Context) error {
|
||||
log.Error().Msg("ModelsPath is a required parameter")
|
||||
return fmt.Errorf("model path is a required parameter")
|
||||
}
|
||||
bcl := config.NewBackendConfigLoader(uhcmd.ModelsPath)
|
||||
err := bcl.LoadBackendConfig(uhcmd.ConfigName)
|
||||
bcl := config.NewModelConfigLoader(uhcmd.ModelsPath)
|
||||
err := bcl.ReadModelConfig(uhcmd.ConfigName)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Str("ConfigName", uhcmd.ConfigName).Msg("error while loading backend")
|
||||
return err
|
||||
}
|
||||
bc, exists := bcl.GetBackendConfig(uhcmd.ConfigName)
|
||||
bc, exists := bcl.GetModelConfig(uhcmd.ConfigName)
|
||||
if !exists {
|
||||
log.Error().Str("ConfigName", uhcmd.ConfigName).Msg("ConfigName not found")
|
||||
}
|
||||
for name, uc := range config.GetAllBackendConfigUsecases() {
|
||||
for name, uc := range config.GetAllModelConfigUsecases() {
|
||||
if bc.HasUsecases(uc) {
|
||||
log.Info().Str("Usecase", name)
|
||||
}
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
package worker
|
||||
|
||||
type WorkerFlags struct {
|
||||
BackendsPath string `env:"LOCALAI_BACKENDS_PATH,BACKENDS_PATH" type:"path" default:"${basepath}/backends" help:"Path containing backends used for inferencing" group:"backends"`
|
||||
ExtraLLamaCPPArgs string `name:"llama-cpp-args" env:"LOCALAI_EXTRA_LLAMA_CPP_ARGS,EXTRA_LLAMA_CPP_ARGS" help:"Extra arguments to pass to llama-cpp-rpc-server"`
|
||||
BackendsPath string `env:"LOCALAI_BACKENDS_PATH,BACKENDS_PATH" type:"path" default:"${basepath}/backends" help:"Path containing backends used for inferencing" group:"backends"`
|
||||
BackendsSystemPath string `env:"LOCALAI_BACKENDS_SYSTEM_PATH,BACKEND_SYSTEM_PATH" type:"path" default:"/usr/share/localai/backends" help:"Path containing system backends used for inferencing" group:"backends"`
|
||||
ExtraLLamaCPPArgs string `name:"llama-cpp-args" env:"LOCALAI_EXTRA_LLAMA_CPP_ARGS,EXTRA_LLAMA_CPP_ARGS" help:"Extra arguments to pass to llama-cpp-rpc-server"`
|
||||
}
|
||||
|
||||
type Worker struct {
|
||||
|
||||
@@ -10,6 +10,7 @@ import (
|
||||
|
||||
cliContext "github.com/mudler/LocalAI/core/cli/context"
|
||||
"github.com/mudler/LocalAI/core/gallery"
|
||||
"github.com/mudler/LocalAI/pkg/system"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
@@ -21,20 +22,19 @@ const (
|
||||
llamaCPPRPCBinaryName = "llama-cpp-rpc-server"
|
||||
)
|
||||
|
||||
func findLLamaCPPBackend(backendSystemPath string) (string, error) {
|
||||
backends, err := gallery.ListSystemBackends(backendSystemPath)
|
||||
func findLLamaCPPBackend(systemState *system.SystemState) (string, error) {
|
||||
backends, err := gallery.ListSystemBackends(systemState)
|
||||
if err != nil {
|
||||
log.Warn().Msgf("Failed listing system backends: %s", err)
|
||||
return "", err
|
||||
}
|
||||
log.Debug().Msgf("System backends: %v", backends)
|
||||
|
||||
backendPath := ""
|
||||
backend, ok := backends.Get("llama-cpp")
|
||||
if !ok {
|
||||
return "", errors.New("llama-cpp backend not found, install it first")
|
||||
}
|
||||
backendPath = filepath.Dir(backend.RunFile)
|
||||
backendPath := filepath.Dir(backend.RunFile)
|
||||
|
||||
if backendPath == "" {
|
||||
return "", errors.New("llama-cpp backend not found, install it first")
|
||||
@@ -54,7 +54,14 @@ func (r *LLamaCPP) Run(ctx *cliContext.Context) error {
|
||||
return fmt.Errorf("usage: local-ai worker llama-cpp-rpc -- <llama-rpc-server-args>")
|
||||
}
|
||||
|
||||
grpcProcess, err := findLLamaCPPBackend(r.BackendsPath)
|
||||
systemState, err := system.GetSystemState(
|
||||
system.WithBackendPath(r.BackendsPath),
|
||||
system.WithBackendSystemPath(r.BackendsSystemPath),
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
grpcProcess, err := findLLamaCPPBackend(systemState)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -10,6 +10,7 @@ import (
|
||||
|
||||
cliContext "github.com/mudler/LocalAI/core/cli/context"
|
||||
"github.com/mudler/LocalAI/core/p2p"
|
||||
"github.com/mudler/LocalAI/pkg/system"
|
||||
"github.com/phayes/freeport"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
@@ -25,6 +26,14 @@ type P2P struct {
|
||||
|
||||
func (r *P2P) Run(ctx *cliContext.Context) error {
|
||||
|
||||
systemState, err := system.GetSystemState(
|
||||
system.WithBackendPath(r.BackendsPath),
|
||||
system.WithBackendSystemPath(r.BackendsSystemPath),
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Check if the token is set
|
||||
// as we always need it.
|
||||
if r.Token == "" {
|
||||
@@ -60,7 +69,7 @@ func (r *P2P) Run(ctx *cliContext.Context) error {
|
||||
for {
|
||||
log.Info().Msgf("Starting llama-cpp-rpc-server on '%s:%d'", address, port)
|
||||
|
||||
grpcProcess, err := findLLamaCPPBackend(r.BackendsPath)
|
||||
grpcProcess, err := findLLamaCPPBackend(systemState)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("Failed to find llama-cpp-rpc-server")
|
||||
return
|
||||
|
||||
Reference in New Issue
Block a user