mirror of
https://github.com/mudler/LocalAI.git
synced 2026-04-01 05:36:49 -04:00
feat: Add LOCALAI_DISABLE_MCP environment variable to disable MCP support (#8816)
* feat: Add LOCALAI_DISABLE_MCP environment variable to disable MCP support - Added DisableMCP field to RunCMD struct in core/cli/run.go - Added LOCALAI_DISABLE_MCP environment variable support - Added DisableMCP field to ApplicationConfig struct - Added DisableMCP AppOption function - Updated MCP endpoint routing to check appConfig.DisableMCP - When LOCALAI_DISABLE_MCP is set to true/1/yes, MCP endpoints are not registered When set, all MCP functionality is disabled and appropriate error messages are returned to users. Use Cases: - Security-conscious deployments where MCP is not needed - Reducing attack surface - Compliance requirements that prohibit certain protocol support Environment variable: LOCALAI_DISABLE_MCP=true Signed-off-by: localai-bot <localai-bot@users.noreply.github.com> * docs: Add documentation for LOCALAI_DISABLE_MCP environment variable - Add section explaining how to disable MCP support using environment variable - Document use cases for disabling MCP - Provide examples for CLI and Docker usage Signed-off-by: localai-bot <localai-bot@users.noreply.github.com> --------- Signed-off-by: localai-bot <localai-bot@users.noreply.github.com> Co-authored-by: localai-bot <localai-bot@users.noreply.github.com>
This commit is contained in:
@@ -79,6 +79,7 @@ type RunCMD struct {
|
||||
LRUEvictionRetryInterval string `env:"LOCALAI_LRU_EVICTION_RETRY_INTERVAL,LRU_EVICTION_RETRY_INTERVAL" default:"1s" help:"Interval between retries when waiting for busy models to become idle (e.g., 1s, 2s) (default: 1s)" group:"backends"`
|
||||
Federated bool `env:"LOCALAI_FEDERATED,FEDERATED" help:"Enable federated instance" group:"federated"`
|
||||
DisableGalleryEndpoint bool `env:"LOCALAI_DISABLE_GALLERY_ENDPOINT,DISABLE_GALLERY_ENDPOINT" help:"Disable the gallery endpoints" group:"api"`
|
||||
DisableMCP bool `env:"LOCALAI_DISABLE_MCP,DISABLE_MCP" help:"Disable MCP (Model Context Protocol) support" group:"api" default:"false"`
|
||||
MachineTag string `env:"LOCALAI_MACHINE_TAG,MACHINE_TAG" help:"Add Machine-Tag header to each response which is useful to track the machine in the P2P network" group:"api"`
|
||||
LoadToMemory []string `env:"LOCALAI_LOAD_TO_MEMORY,LOAD_TO_MEMORY" help:"A list of models to load into memory at startup" group:"models"`
|
||||
EnableTracing bool `env:"LOCALAI_ENABLE_TRACING,ENABLE_TRACING" help:"Enable API tracing" group:"api"`
|
||||
@@ -198,6 +199,10 @@ func (r *RunCMD) Run(ctx *cliContext.Context) error {
|
||||
opts = append(opts, config.DisableGalleryEndpoint)
|
||||
}
|
||||
|
||||
if r.DisableMCP {
|
||||
opts = append(opts, config.DisableMCP)
|
||||
}
|
||||
|
||||
if idleWatchDog || busyWatchDog {
|
||||
opts = append(opts, config.EnableWatchDog)
|
||||
if idleWatchDog {
|
||||
|
||||
@@ -45,6 +45,7 @@ type ApplicationConfig struct {
|
||||
DisableMetrics bool
|
||||
HttpGetExemptedEndpoints []*regexp.Regexp
|
||||
DisableGalleryEndpoint bool
|
||||
DisableMCP bool
|
||||
LoadToMemory []string
|
||||
|
||||
Galleries []Gallery
|
||||
@@ -184,6 +185,10 @@ var DisableGalleryEndpoint = func(o *ApplicationConfig) {
|
||||
o.DisableGalleryEndpoint = true
|
||||
}
|
||||
|
||||
var DisableMCP = func(o *ApplicationConfig) {
|
||||
o.DisableMCP = true
|
||||
}
|
||||
|
||||
var EnableWatchDogBusyCheck = func(o *ApplicationConfig) {
|
||||
o.WatchDog = true
|
||||
o.WatchDogBusy = true
|
||||
|
||||
@@ -140,7 +140,7 @@ func RegisterLocalAIRoutes(router *echo.Echo,
|
||||
|
||||
// MCP endpoint - supports both streaming and non-streaming modes
|
||||
// Note: streaming mode is NOT compatible with the OpenAI apis. We have a set which streams more states.
|
||||
if evaluator != nil {
|
||||
if evaluator != nil && !appConfig.DisableMCP {
|
||||
mcpStreamHandler := localai.MCPEndpoint(cl, ml, evaluator, appConfig)
|
||||
mcpStreamMiddleware := []echo.MiddlewareFunc{
|
||||
requestExtractor.BuildFilteredFirstAvailableDefaultModel(config.BuildUsecaseFilterFn(config.FLAG_CHAT)),
|
||||
|
||||
@@ -365,3 +365,33 @@ mcp:
|
||||
|
||||
- [Awesome MCPs](https://github.com/punkpeye/awesome-mcp-servers)
|
||||
- [A list of MCPs by mudler](https://github.com/mudler/MCPs)
|
||||
|
||||
## Disabling MCP Support
|
||||
|
||||
You can completely disable MCP functionality in LocalAI by setting the `LOCALAI_DISABLE_MCP` environment variable to a value Go parses as true, such as `true` or `1` (note: `yes` is not accepted by Go's boolean parsing):
|
||||
|
||||
```bash
|
||||
export LOCALAI_DISABLE_MCP=true
|
||||
```
|
||||
|
||||
When this environment variable is set, all MCP-related features will be disabled, including:
|
||||
- MCP server connections (both remote and stdio)
|
||||
- Agent tool execution
|
||||
- The `/mcp/v1/chat/completions` endpoint
|
||||
|
||||
This is useful when you want to:
|
||||
- Run LocalAI without MCP capabilities for security reasons
|
||||
- Reduce the attack surface by disabling unnecessary features
|
||||
- Troubleshoot MCP-related issues
|
||||
|
||||
### Example
|
||||
|
||||
```bash
|
||||
# Disable MCP completely
|
||||
LOCALAI_DISABLE_MCP=true localai run
|
||||
|
||||
# Or in Docker
|
||||
docker run -e LOCALAI_DISABLE_MCP=true localai/localai:latest
|
||||
```
|
||||
|
||||
When MCP is disabled, the MCP endpoint is simply not registered, so requests to it receive the server's standard not-found response, and any `mcp` sections in model configurations have no effect.
|
||||
|
||||
Reference in New Issue
Block a user