fix: correctly propagate error during model load (#7610)

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
This commit is contained in:
Ettore Di Giacinto
2025-12-16 18:26:54 +01:00
committed by GitHub
parent b348a99b03
commit 424c95edba

View File

@@ -173,9 +173,8 @@ func (ml *ModelLoader) backendLoader(opts ...Option) (client grpc.Backend, err e
model, err := ml.LoadModel(o.modelID, o.model, ml.grpcModel(backend, o))
if err != nil {
err := ml.StopGRPC(only(o.modelID))
if err != nil {
log.Error().Err(err).Str("model", o.modelID).Msg("error stopping model")
if stopErr := ml.StopGRPC(only(o.modelID)); stopErr != nil {
log.Error().Err(stopErr).Str("model", o.modelID).Msg("error stopping model")
}
log.Error().Str("modelID", o.modelID).Err(err).Msgf("Failed to load model %s with backend %s", o.modelID, o.backendString)
return nil, err