Compare commits

..

9 Commits

Author SHA1 Message Date
Parth Sareen
6b2abfb433 server: add tests and fix isHuggingFaceURL edge case
- Add comprehensive tests for isHuggingFaceURL and getNumDownloadParts
- Fix bug where domains ending in huggingface.co (like nothuggingface.co)
  would incorrectly match as HuggingFace URLs
- Improve code comments with more detailed documentation
2026-01-18 16:45:17 -08:00
Parth Sareen
805ed4644c server: reduce download concurrency for HuggingFace URLs
Reduces concurrent download parts from 16 to 4 for HuggingFace URLs
to avoid triggering rate limits (HTTP 429 errors).

Adds OLLAMA_HF_CONCURRENCY environment variable for users who want
to customize the concurrency level.

Fixes #13297
2026-01-18 16:38:49 -08:00
Daniel Hiltgen
e4b488a7b5 CI: dedup cuda libraries to reduce payload size (#13704) 2026-01-13 11:25:31 -08:00
Daniel Hiltgen
98079ddd79 ci: add missing mlx components to release build (#13702) 2026-01-13 09:13:09 -08:00
Jeffrey Morgan
d70942f47b x/imagegen/cli: skip local model check (#13699) 2026-01-12 22:38:10 -08:00
Jeffrey Morgan
58e4701557 scripts: increase notarization timeout to 20m (#13697)
The 100MB mlx.metallib file significantly increased the app bundle size,
causing Apple's notarization service to timeout with the previous 10m limit.
2026-01-12 20:38:38 -08:00
Jeffrey Morgan
dbf47ee55a cmake: use CMAKE_SYSTEM_PROCESSOR instead of CMAKE_OSX_ARCHITECTURES for mlx.metallib install (#13696)
The CMake condition for installing mlx.metallib checks
CMAKE_OSX_ARCHITECTURES, but this variable is only set when explicitly
passed - not auto-detected. The arm64 build was missing this flag,
causing the metallib to not be installed, which then caused codesign
to fail on the unexpanded glob pattern.
2026-01-12 20:05:11 -08:00
Jeffrey Morgan
af7ea6e96e x/imagegen: install mlx.metallib and fix macOS rpath handling, add mlx library directories to LD_LIBRARY_PATH (#13695)
- Install mlx.metallib for arm64 builds (required for Metal GPU acceleration)
- Apply rpath settings to all macOS builds, not just x86_64
- Add CMAKE_BUILD_WITH_INSTALL_RPATH to avoid install_name_tool errors
- Update build_darwin.sh to copy, sign, and package the metallib
2026-01-12 19:03:11 -08:00
Jeffrey Morgan
8f1e0140e7 x/imagegen: fix mlx build in Dockerfile and macOS build script (#13693) 2026-01-12 15:52:43 -08:00
17 changed files with 406 additions and 1390 deletions

View File

@@ -372,13 +372,17 @@ jobs:
outputs: type=local,dest=dist/${{ matrix.os }}-${{ matrix.arch }}
cache-from: type=registry,ref=${{ vars.DOCKER_REPO }}:latest
cache-to: type=inline
- name: Deduplicate CUDA libraries
run: |
./scripts/deduplicate_cuda_libs.sh dist/${{ matrix.os }}-${{ matrix.arch }}
- run: |
for COMPONENT in bin/* lib/ollama/*; do
case "$COMPONENT" in
bin/ollama) echo $COMPONENT >>ollama-${{ matrix.os }}-${{ matrix.arch }}.tar.in ;;
bin/ollama*) echo $COMPONENT >>ollama-${{ matrix.os }}-${{ matrix.arch }}.tar.in ;;
lib/ollama/*.so*) echo $COMPONENT >>ollama-${{ matrix.os }}-${{ matrix.arch }}.tar.in ;;
lib/ollama/cuda_v*) echo $COMPONENT >>ollama-${{ matrix.os }}-${{ matrix.arch }}.tar.in ;;
lib/ollama/vulkan*) echo $COMPONENT >>ollama-${{ matrix.os }}-${{ matrix.arch }}.tar.in ;;
lib/ollama/mlx*) echo $COMPONENT >>ollama-${{ matrix.os }}-${{ matrix.arch }}.tar.in ;;
lib/ollama/cuda_jetpack5) echo $COMPONENT >>ollama-${{ matrix.os }}-${{ matrix.arch }}-jetpack5.tar.in ;;
lib/ollama/cuda_jetpack6) echo $COMPONENT >>ollama-${{ matrix.os }}-${{ matrix.arch }}-jetpack6.tar.in ;;
lib/ollama/rocm) echo $COMPONENT >>ollama-${{ matrix.os }}-${{ matrix.arch }}-rocm.tar.in ;;

View File

@@ -48,9 +48,10 @@ if((CMAKE_OSX_ARCHITECTURES AND NOT CMAKE_OSX_ARCHITECTURES MATCHES "arm64")
set(GGML_CPU_ALL_VARIANTS ON)
endif()
if (CMAKE_OSX_ARCHITECTURES MATCHES "x86_64")
if(APPLE)
set(CMAKE_BUILD_RPATH "@loader_path")
set(CMAKE_INSTALL_RPATH "@loader_path")
set(CMAKE_BUILD_WITH_INSTALL_RPATH ON)
endif()
set(OLLAMA_BUILD_DIR ${CMAKE_BINARY_DIR}/lib/ollama)
@@ -196,6 +197,14 @@ if(MLX_ENGINE)
FRAMEWORK DESTINATION ${OLLAMA_INSTALL_DIR} COMPONENT MLX
)
# Install the Metal library for macOS arm64 (must be colocated with the binary)
# Metal backend is only built for arm64, not x86_64
if(APPLE AND CMAKE_SYSTEM_PROCESSOR STREQUAL "arm64")
install(FILES ${CMAKE_BINARY_DIR}/_deps/mlx-build/mlx/backend/metal/kernels/mlx.metallib
DESTINATION ${OLLAMA_INSTALL_DIR}
COMPONENT MLX)
endif()
# Manually install cudart and cublas since they might not be picked up as direct dependencies
if(CUDAToolkit_FOUND)
file(GLOB CUDART_LIBS

View File

@@ -161,6 +161,9 @@ ARG GOFLAGS="'-ldflags=-w -s'"
ENV CGO_ENABLED=1
ARG CGO_CFLAGS
ARG CGO_CXXFLAGS
RUN mkdir -p dist/bin
RUN --mount=type=cache,target=/root/.cache/go-build \
go build -tags mlx -trimpath -buildmode=pie -o dist/bin/ollama-mlx .
FROM base AS build
WORKDIR /go/src/github.com/ollama/ollama
@@ -182,6 +185,7 @@ COPY --from=cuda-12 dist/lib/ollama /lib/ollama/
COPY --from=cuda-13 dist/lib/ollama /lib/ollama/
COPY --from=vulkan dist/lib/ollama /lib/ollama/
COPY --from=mlx /go/src/github.com/ollama/ollama/dist/lib/ollama /lib/ollama/
COPY --from=mlx /go/src/github.com/ollama/ollama/dist/bin/ /bin/
FROM --platform=linux/arm64 scratch AS arm64
# COPY --from=cuda-11 dist/lib/ollama/ /lib/ollama/

View File

@@ -1,558 +0,0 @@
package cmd
import (
"context"
"encoding/json"
"fmt"
"os"
"os/exec"
"path/filepath"
"strings"
"github.com/ollama/ollama/api"
"github.com/spf13/cobra"
)
// EnvVar is a single NAME=VALUE environment variable passed to a
// launched application.
type EnvVar struct {
	Name  string
	Value string
}

// AppConfig describes how to configure and launch one external
// application against a local Ollama server.
type AppConfig struct {
	// Name is the canonical app name used when saving connections.
	Name string
	// DisplayName is the human-readable name shown to the user.
	DisplayName string
	// Command is the executable looked up on PATH and run.
	Command string
	// EnvVars returns extra environment variables for the given model.
	EnvVars func(model string) []EnvVar
	// Args returns extra command-line arguments for the given model.
	Args func(model string) []string
	// Setup, if non-nil, writes the app's own configuration files
	// before launch.
	Setup func(model string) error
	// CheckInstall reports an error when the app is not installed.
	CheckInstall func() error
}
// ClaudeConfig configures Claude Code to use the local Ollama server
// through Anthropic-compatible environment variables.
var ClaudeConfig = &AppConfig{
	Name:        "Claude",
	DisplayName: "Claude Code",
	Command:     "claude",
	EnvVars: func(model string) []EnvVar {
		// Point the Anthropic client at the local server; the key and
		// token values are placeholders (Ollama does not validate them).
		return []EnvVar{
			{Name: "ANTHROPIC_BASE_URL", Value: "http://localhost:11434"},
			{Name: "ANTHROPIC_API_KEY", Value: "ollama"},
			{Name: "ANTHROPIC_AUTH_TOKEN", Value: "ollama"},
		}
	},
	Args: func(model string) []string {
		if model == "" {
			return nil
		}
		return []string{"--model", model}
	},
	CheckInstall: func() error {
		if _, err := exec.LookPath("claude"); err != nil {
			return fmt.Errorf("claude is not installed. Install with: npm install -g @anthropic-ai/claude-code")
		}
		return nil
	},
}
// DroidConfig configures Factory's Droid CLI. Droid reads its model
// configuration from ~/.factory/settings.json (written by Setup), so no
// environment variables or arguments are passed at launch.
var DroidConfig = &AppConfig{
	Name:        "Droid",
	DisplayName: "Droid",
	Command:     "droid",
	EnvVars:     func(model string) []EnvVar { return nil },
	Args:        func(model string) []string { return nil },
	Setup:       setupDroidSettings,
	CheckInstall: func() error {
		if _, err := exec.LookPath("droid"); err != nil {
			return fmt.Errorf("droid is not installed. Install from: https://docs.factory.ai/cli/install")
		}
		return nil
	},
}
// AppRegistry maps lowercase app aliases to their configurations.
// "claude" and "claude-code" are aliases for the same app.
var AppRegistry = map[string]*AppConfig{
	"claude":      ClaudeConfig,
	"claude-code": ClaudeConfig,
	"droid":       DroidConfig,
	"opencode":    OpenCodeConfig,
}

// GetApp looks up an app configuration by name, case-insensitively.
func GetApp(name string) (*AppConfig, bool) {
	app, ok := AppRegistry[strings.ToLower(name)]
	return app, ok
}
// getModelContextLength asks the local Ollama API for the model's
// context window size. It falls back to 8192 whenever the server, the
// model info, or the architecture-specific key is unavailable, and caps
// the reported value at 128000.
func getModelContextLength(model string) int {
	const (
		fallback = 8192
		ceiling  = 128000
	)
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return fallback
	}
	resp, err := client.Show(context.Background(), &api.ShowRequest{Model: model})
	if err != nil || resp.ModelInfo == nil {
		return fallback
	}
	arch, ok := resp.ModelInfo["general.architecture"].(string)
	if !ok {
		return fallback
	}
	// The context length lives under "<architecture>.context_length".
	key := fmt.Sprintf("%s.context_length", arch)
	if v, ok := resp.ModelInfo[key].(float64); ok {
		return min(int(v), ceiling)
	}
	return fallback
}
// setupDroidSettings adds (or updates) a custom model entry for Ollama
// in Droid's ~/.factory/settings.json and makes it the default model
// for new sessions. Unrelated settings and model entries are preserved.
//
// Fix: previously, an existing entry for the model that lacked a usable
// string "id" was rewritten with an empty id, and the session default
// model was set to "". Such entries now receive a freshly generated id.
func setupDroidSettings(model string) error {
	home, err := os.UserHomeDir()
	if err != nil {
		return err
	}
	settingsPath := filepath.Join(home, ".factory", "settings.json")
	if err := os.MkdirAll(filepath.Dir(settingsPath), 0755); err != nil {
		return err
	}
	settings := make(map[string]any)
	if data, err := os.ReadFile(settingsPath); err == nil {
		// Best effort: an unreadable or corrupt file starts from empty.
		json.Unmarshal(data, &settings)
	}
	var customModels []any
	if existing, ok := settings["customModels"].([]any); ok {
		customModels = existing
	}
	// Find any existing entry for this model, and the highest index in
	// use so a new entry gets a unique index.
	maxIndex := 0
	existingIdx := -1
	var existingID string
	for i, m := range customModels {
		if entry, ok := m.(map[string]any); ok {
			if entry["model"] == model {
				existingIdx = i
				if id, ok := entry["id"].(string); ok {
					existingID = id
				}
			}
			if idx, ok := entry["index"].(float64); ok && int(idx) > maxIndex {
				maxIndex = int(idx)
			}
		}
	}
	var modelID string
	newEntry := map[string]any{
		"model":       model,
		"displayName": fmt.Sprintf("%s [Ollama]", model),
		"baseUrl":     "http://localhost:11434/v1",
		"apiKey":      "ollama",
		"provider":    "generic-chat-completion-api",
		// NOTE(review): this uses the model's context length as the max
		// output tokens — confirm that is intended.
		"maxOutputTokens": getModelContextLength(model),
		"noImageSupport":  true,
	}
	if existingIdx >= 0 && existingID != "" {
		// Re-use the existing id so references to it stay valid.
		modelID = existingID
		newEntry["id"] = existingID
		customModels[existingIdx] = newEntry
	} else {
		newIndex := maxIndex + 1
		modelID = fmt.Sprintf("custom:%s-[Ollama]-%d", model, newIndex)
		newEntry["id"] = modelID
		newEntry["index"] = newIndex
		if existingIdx >= 0 {
			// Existing entry had no usable id: replace it in place with a
			// generated id rather than appending a duplicate.
			customModels[existingIdx] = newEntry
		} else {
			customModels = append(customModels, newEntry)
		}
	}
	settings["customModels"] = customModels
	// Select the entry as the default model for new sessions.
	sessionSettings, ok := settings["sessionDefaultSettings"].(map[string]any)
	if !ok {
		sessionSettings = make(map[string]any)
	}
	sessionSettings["model"] = modelID
	settings["sessionDefaultSettings"] = sessionSettings
	data, err := json.MarshalIndent(settings, "", " ")
	if err != nil {
		return err
	}
	return os.WriteFile(settingsPath, data, 0644)
}
// setupOpenCodeSettings registers model with OpenCode by (1) adding an
// "ollama" provider entry containing the model to
// ~/.config/opencode/opencode.json and (2) moving the model to the
// front of the recent-model list in ~/.local/state/opencode/model.json.
// Existing providers, models, and top-level settings are preserved.
func setupOpenCodeSettings(model string) error {
	home, err := os.UserHomeDir()
	if err != nil {
		return err
	}
	configPath := filepath.Join(home, ".config", "opencode", "opencode.json")
	if err := os.MkdirAll(filepath.Dir(configPath), 0755); err != nil {
		return err
	}
	config := make(map[string]any)
	if data, err := os.ReadFile(configPath); err == nil {
		// Best effort: an unreadable or corrupt config starts from empty.
		json.Unmarshal(data, &config)
	}
	config["$schema"] = "https://opencode.ai/config.json"
	provider, ok := config["provider"].(map[string]any)
	if !ok {
		provider = make(map[string]any)
	}
	ollama, ok := provider["ollama"].(map[string]any)
	if !ok {
		// First-time setup: point the OpenAI-compatible adapter at the
		// local Ollama server.
		ollama = map[string]any{
			"npm":  "@ai-sdk/openai-compatible",
			"name": "Ollama (local)",
			"options": map[string]any{
				"baseURL": "http://localhost:11434/v1",
			},
		}
	}
	models, ok := ollama["models"].(map[string]any)
	if !ok {
		models = make(map[string]any)
	}
	models[model] = map[string]any{
		"name": fmt.Sprintf("%s [Ollama]", model),
	}
	ollama["models"] = models
	provider["ollama"] = ollama
	config["provider"] = provider
	data, err := json.MarshalIndent(config, "", " ")
	if err != nil {
		return err
	}
	if err := os.WriteFile(configPath, data, 0644); err != nil {
		return err
	}
	// Update the recent-model state so OpenCode selects this model on
	// its next run.
	statePath := filepath.Join(home, ".local", "state", "opencode", "model.json")
	if err := os.MkdirAll(filepath.Dir(statePath), 0755); err != nil {
		return err
	}
	state := map[string]any{
		"recent":   []any{},
		"favorite": []any{},
		"variant":  map[string]any{},
	}
	if data, err := os.ReadFile(statePath); err == nil {
		json.Unmarshal(data, &state)
	}
	// Drop any previous entry for this model, then re-insert it at the
	// front of the recent list.
	recent, _ := state["recent"].([]any)
	newRecent := []any{}
	for _, entry := range recent {
		if e, ok := entry.(map[string]any); ok {
			if e["providerID"] == "ollama" && e["modelID"] == model {
				continue
			}
		}
		newRecent = append(newRecent, entry)
	}
	newRecent = append([]any{map[string]any{
		"providerID": "ollama",
		"modelID":    model,
	}}, newRecent...)
	// Keep the recent list bounded to 10 entries.
	if len(newRecent) > 10 {
		newRecent = newRecent[:10]
	}
	state["recent"] = newRecent
	data, err = json.MarshalIndent(state, "", " ")
	if err != nil {
		return err
	}
	return os.WriteFile(statePath, data, 0644)
}
// OpenCodeConfig configures the OpenCode CLI. OpenCode reads its
// provider and model configuration from files written by Setup, so no
// environment variables or arguments are passed at launch.
var OpenCodeConfig = &AppConfig{
	Name:        "OpenCode",
	DisplayName: "OpenCode",
	Command:     "opencode",
	EnvVars:     func(model string) []EnvVar { return nil },
	Args:        func(model string) []string { return nil },
	Setup:       setupOpenCodeSettings,
	CheckInstall: func() error {
		if _, err := exec.LookPath("opencode"); err != nil {
			return fmt.Errorf("opencode is not installed. Install from: https://opencode.ai")
		}
		return nil
	},
}
// getOpenCodeConfiguredModel returns the model most recently selected
// for the "ollama" provider in OpenCode's state file, or "" when the
// file is missing, malformed, or the most recent model is not Ollama's.
func getOpenCodeConfiguredModel() string {
	home, err := os.UserHomeDir()
	if err != nil {
		return ""
	}
	statePath := filepath.Join(home, ".local", "state", "opencode", "model.json")
	raw, err := os.ReadFile(statePath)
	if err != nil {
		return ""
	}
	var state map[string]any
	if json.Unmarshal(raw, &state) != nil {
		return ""
	}
	recent, _ := state["recent"].([]any)
	if len(recent) == 0 {
		return ""
	}
	entry, ok := recent[0].(map[string]any)
	if !ok || entry["providerID"] != "ollama" {
		return ""
	}
	modelID, _ := entry["modelID"].(string)
	return modelID
}
// getDroidConfiguredModel resolves Droid's session default model id in
// ~/.factory/settings.json back to the underlying model name via the
// customModels list. It returns "" when anything is missing or
// malformed.
func getDroidConfiguredModel() string {
	home, err := os.UserHomeDir()
	if err != nil {
		return ""
	}
	raw, err := os.ReadFile(filepath.Join(home, ".factory", "settings.json"))
	if err != nil {
		return ""
	}
	var settings map[string]any
	if json.Unmarshal(raw, &settings) != nil {
		return ""
	}
	session, ok := settings["sessionDefaultSettings"].(map[string]any)
	if !ok {
		return ""
	}
	wantID, ok := session["model"].(string)
	if !ok || wantID == "" {
		return ""
	}
	entries, ok := settings["customModels"].([]any)
	if !ok {
		return ""
	}
	for _, item := range entries {
		entry, ok := item.(map[string]any)
		if !ok {
			continue
		}
		if entry["id"] != wantID {
			continue
		}
		if name, ok := entry["model"].(string); ok {
			return name
		}
	}
	return ""
}
// runInApp launches the named app configured to use modelName, wiring
// its stdio to the current terminal and appending any app-specific
// environment variables. It blocks until the app exits.
func runInApp(appName, modelName string) error {
	app, ok := GetApp(appName)
	if !ok {
		return fmt.Errorf("unknown app: %s", appName)
	}
	if err := app.CheckInstall(); err != nil {
		return err
	}
	// Some apps need their own config files written before launch.
	if app.Setup != nil {
		if err := app.Setup(modelName); err != nil {
			return fmt.Errorf("setup failed: %w", err)
		}
	}
	proc := exec.Command(app.Command, app.Args(modelName)...)
	proc.Stdin = os.Stdin
	proc.Stdout = os.Stdout
	proc.Stderr = os.Stderr
	// Inherit the current environment, then add the app's overrides.
	proc.Env = os.Environ()
	for _, env := range app.EnvVars(modelName) {
		proc.Env = append(proc.Env, fmt.Sprintf("%s=%s", env.Name, env.Value))
	}
	fmt.Fprintf(os.Stderr, "Launching %s with %s...\n", app.DisplayName, modelName)
	return proc.Run()
}
// ConnectCmd returns the "ollama connect" command, which records which
// model an external app should use, prompting interactively for any
// missing app or model, and optionally launches the app.
func ConnectCmd() *cobra.Command {
	var modelName string
	cmd := &cobra.Command{
		Use:   "connect [APP]",
		Short: "Configure an external app to use Ollama",
		Long: `Configure an external application to use Ollama as its backend.
Supported apps:
claude Claude Code
droid Droid
opencode OpenCode
Examples:
ollama connect
ollama connect claude
ollama connect claude --model llama3.2`,
		Args:    cobra.MaximumNArgs(1),
		PreRunE: checkServerHeartbeat,
		RunE: func(cmd *cobra.Command, args []string) error {
			var appName string
			if len(args) > 0 {
				appName = args[0]
			} else {
				// No app given: prompt interactively.
				var err error
				appName, err = selectApp()
				if err != nil {
					return err
				}
			}
			if _, ok := GetApp(appName); !ok {
				return fmt.Errorf("unknown app: %s", appName)
			}
			if modelName == "" {
				// No --model flag: prompt for one.
				var err error
				modelName, err = selectModelForConnect(cmd.Context(), "")
				if err != nil {
					return err
				}
			}
			if err := SaveConnection(appName, modelName); err != nil {
				return fmt.Errorf("failed to save: %w", err)
			}
			fmt.Fprintf(os.Stderr, "Added %s to %s\n", modelName, appName)
			if launch, _ := confirmLaunch(appName); launch {
				return runInApp(appName, modelName)
			}
			fmt.Fprintf(os.Stderr, "Run 'ollama launch %s' to start later\n", strings.ToLower(appName))
			return nil
		},
	}
	cmd.Flags().StringVar(&modelName, "model", "", "Model to use")
	return cmd
}
// getAppConfiguredModel returns the model an app has configured in its
// own settings files, or "" for apps that store nothing we can read.
func getAppConfiguredModel(appName string) string {
	name := strings.ToLower(appName)
	if name == "opencode" {
		return getOpenCodeConfiguredModel()
	}
	if name == "droid" {
		return getDroidConfiguredModel()
	}
	return ""
}
// LaunchCmd returns the "ollama launch" command, which starts a
// previously connected app. Model resolution prefers the app's own
// settings files, then the saved connection; when nothing is configured
// at all it drops into the connect flow.
func LaunchCmd() *cobra.Command {
	cmd := &cobra.Command{
		Use:   "launch [APP]",
		Short: "Launch a configured app",
		Long: `Launch a configured application with Ollama as its backend.
If no app is specified, shows a list of configured apps to choose from.
If no apps have been configured, starts the connect flow.
Examples:
ollama launch
ollama launch claude`,
		Args:    cobra.MaximumNArgs(1),
		PreRunE: checkServerHeartbeat,
		RunE: func(cmd *cobra.Command, args []string) error {
			var appName string
			if len(args) > 0 {
				appName = args[0]
			} else {
				selected, err := selectConnectedApp()
				if err != nil {
					return err
				}
				if selected == "" {
					// No connected apps, start connect flow
					fmt.Fprintf(os.Stderr, "No apps configured. Let's set one up.\n\n")
					appName, err = selectApp()
					if err != nil {
						return err
					}
					modelName, err := selectModelForConnect(cmd.Context(), "")
					if err != nil {
						return err
					}
					if err := SaveConnection(appName, modelName); err != nil {
						return fmt.Errorf("failed to save: %w", err)
					}
					fmt.Fprintf(os.Stderr, "Added %s to %s\n", modelName, appName)
					return runInApp(appName, modelName)
				}
				appName = selected
			}
			app, ok := GetApp(appName)
			if !ok {
				return fmt.Errorf("unknown app: %s", appName)
			}
			// Check app's own config first
			modelName := getAppConfiguredModel(appName)
			// Fall back to our saved connection config
			if modelName == "" {
				config, err := LoadConnection(appName)
				if err != nil {
					if os.IsNotExist(err) {
						// No config, drop into connect flow
						modelName, err = selectModelForConnect(cmd.Context(), "")
						if err != nil {
							return err
						}
						if err := SaveConnection(appName, modelName); err != nil {
							return fmt.Errorf("failed to save: %w", err)
						}
						fmt.Fprintf(os.Stderr, "Added %s to %s\n", modelName, appName)
					} else {
						return err
					}
				} else {
					modelName = config.Model
				}
			}
			return runInApp(app.Name, modelName)
		},
	}
	return cmd
}

View File

@@ -1,189 +0,0 @@
package cmd
import (
"encoding/json"
"os"
"path/filepath"
"testing"
)
// TestSetupOpenCodeSettings exercises setupOpenCodeSettings against a
// temporary HOME, covering fresh installs, merging with pre-existing
// configuration, and recent-model state maintenance.
func TestSetupOpenCodeSettings(t *testing.T) {
	tmpDir := t.TempDir()
	// Redirect os.UserHomeDir (via $HOME) for the duration of the test.
	origHome := os.Getenv("HOME")
	os.Setenv("HOME", tmpDir)
	defer os.Setenv("HOME", origHome)
	configDir := filepath.Join(tmpDir, ".config", "opencode")
	configPath := filepath.Join(configDir, "opencode.json")
	stateDir := filepath.Join(tmpDir, ".local", "state", "opencode")
	statePath := filepath.Join(stateDir, "model.json")
	// cleanup resets both config and state between subtests.
	cleanup := func() {
		os.RemoveAll(configDir)
		os.RemoveAll(stateDir)
	}
	t.Run("fresh install", func(t *testing.T) {
		cleanup()
		if err := setupOpenCodeSettings("llama3.2"); err != nil {
			t.Fatal(err)
		}
		assertOpenCodeModelExists(t, configPath, "llama3.2")
		assertOpenCodeRecentModel(t, statePath, 0, "ollama", "llama3.2")
	})
	t.Run("preserve other providers", func(t *testing.T) {
		cleanup()
		os.MkdirAll(configDir, 0755)
		os.WriteFile(configPath, []byte(`{"provider":{"anthropic":{"apiKey":"xxx"}}}`), 0644)
		if err := setupOpenCodeSettings("llama3.2"); err != nil {
			t.Fatal(err)
		}
		data, _ := os.ReadFile(configPath)
		var cfg map[string]any
		json.Unmarshal(data, &cfg)
		provider := cfg["provider"].(map[string]any)
		if provider["anthropic"] == nil {
			t.Error("anthropic provider was removed")
		}
		assertOpenCodeModelExists(t, configPath, "llama3.2")
	})
	t.Run("preserve other models", func(t *testing.T) {
		cleanup()
		os.MkdirAll(configDir, 0755)
		os.WriteFile(configPath, []byte(`{"provider":{"ollama":{"models":{"mistral":{"name":"Mistral"}}}}}`), 0644)
		if err := setupOpenCodeSettings("llama3.2"); err != nil {
			t.Fatal(err)
		}
		assertOpenCodeModelExists(t, configPath, "mistral")
		assertOpenCodeModelExists(t, configPath, "llama3.2")
	})
	t.Run("update existing model", func(t *testing.T) {
		cleanup()
		// Running setup twice must be idempotent.
		setupOpenCodeSettings("llama3.2")
		setupOpenCodeSettings("llama3.2")
		assertOpenCodeModelExists(t, configPath, "llama3.2")
	})
	t.Run("preserve top-level keys", func(t *testing.T) {
		cleanup()
		os.MkdirAll(configDir, 0755)
		os.WriteFile(configPath, []byte(`{"theme":"dark","keybindings":{}}`), 0644)
		if err := setupOpenCodeSettings("llama3.2"); err != nil {
			t.Fatal(err)
		}
		data, _ := os.ReadFile(configPath)
		var cfg map[string]any
		json.Unmarshal(data, &cfg)
		if cfg["theme"] != "dark" {
			t.Error("theme was removed")
		}
		if cfg["keybindings"] == nil {
			t.Error("keybindings was removed")
		}
	})
	t.Run("model state - insert at index 0", func(t *testing.T) {
		cleanup()
		os.MkdirAll(stateDir, 0755)
		os.WriteFile(statePath, []byte(`{"recent":[{"providerID":"anthropic","modelID":"claude"}],"favorite":[],"variant":{}}`), 0644)
		if err := setupOpenCodeSettings("llama3.2"); err != nil {
			t.Fatal(err)
		}
		assertOpenCodeRecentModel(t, statePath, 0, "ollama", "llama3.2")
		assertOpenCodeRecentModel(t, statePath, 1, "anthropic", "claude")
	})
	t.Run("model state - preserve favorites and variants", func(t *testing.T) {
		cleanup()
		os.MkdirAll(stateDir, 0755)
		os.WriteFile(statePath, []byte(`{"recent":[],"favorite":[{"providerID":"x","modelID":"y"}],"variant":{"a":"b"}}`), 0644)
		if err := setupOpenCodeSettings("llama3.2"); err != nil {
			t.Fatal(err)
		}
		data, _ := os.ReadFile(statePath)
		var state map[string]any
		json.Unmarshal(data, &state)
		if len(state["favorite"].([]any)) != 1 {
			t.Error("favorite was modified")
		}
		if state["variant"].(map[string]any)["a"] != "b" {
			t.Error("variant was modified")
		}
	})
	t.Run("model state - deduplicate on re-add", func(t *testing.T) {
		cleanup()
		os.MkdirAll(stateDir, 0755)
		os.WriteFile(statePath, []byte(`{"recent":[{"providerID":"ollama","modelID":"llama3.2"},{"providerID":"anthropic","modelID":"claude"}],"favorite":[],"variant":{}}`), 0644)
		if err := setupOpenCodeSettings("llama3.2"); err != nil {
			t.Fatal(err)
		}
		data, _ := os.ReadFile(statePath)
		var state map[string]any
		json.Unmarshal(data, &state)
		recent := state["recent"].([]any)
		if len(recent) != 2 {
			t.Errorf("expected 2 recent entries, got %d", len(recent))
		}
		assertOpenCodeRecentModel(t, statePath, 0, "ollama", "llama3.2")
	})
}
// assertOpenCodeModelExists fails the test unless the OpenCode config
// file at path lists model under the "ollama" provider.
func assertOpenCodeModelExists(t *testing.T, path, model string) {
	t.Helper()
	raw, err := os.ReadFile(path)
	if err != nil {
		t.Fatal(err)
	}
	cfg := map[string]any{}
	if err := json.Unmarshal(raw, &cfg); err != nil {
		t.Fatal(err)
	}
	providers, ok := cfg["provider"].(map[string]any)
	if !ok {
		t.Fatal("provider not found")
	}
	ollamaProvider, ok := providers["ollama"].(map[string]any)
	if !ok {
		t.Fatal("ollama provider not found")
	}
	modelTable, ok := ollamaProvider["models"].(map[string]any)
	if !ok {
		t.Fatal("models not found")
	}
	if modelTable[model] == nil {
		t.Errorf("model %s not found", model)
	}
}
// assertOpenCodeRecentModel fails the test unless the recent-model list
// in the state file at path holds the given provider/model pair at
// position index.
func assertOpenCodeRecentModel(t *testing.T, path string, index int, providerID, modelID string) {
	t.Helper()
	raw, err := os.ReadFile(path)
	if err != nil {
		t.Fatal(err)
	}
	state := map[string]any{}
	if err := json.Unmarshal(raw, &state); err != nil {
		t.Fatal(err)
	}
	recent, ok := state["recent"].([]any)
	if !ok {
		t.Fatal("recent not found")
	}
	if index >= len(recent) {
		t.Fatalf("index %d out of range (len=%d)", index, len(recent))
	}
	entry, ok := recent[index].(map[string]any)
	if !ok {
		t.Fatal("entry is not a map")
	}
	if got := entry["providerID"]; got != providerID {
		t.Errorf("expected providerID %s, got %s", providerID, got)
	}
	if got := entry["modelID"]; got != modelID {
		t.Errorf("expected modelID %s, got %s", modelID, got)
	}
}

View File

@@ -1945,8 +1945,6 @@ func NewCLI() *cobra.Command {
copyCmd,
deleteCmd,
runnerCmd,
ConnectCmd(),
LaunchCmd(),
)
return rootCmd

View File

@@ -1,99 +0,0 @@
package cmd
import (
"encoding/json"
"os"
"path/filepath"
"time"
)
// ConnectionConfig records which model an external app was configured
// to use and when the connection was made.
type ConnectionConfig struct {
	App          string    `json:"app"`
	Model        string    `json:"model"`
	ConfiguredAt time.Time `json:"configured_at"`
}
// configPath returns the on-disk location of the saved connection file
// for appName: ~/.ollama/connections/<app>.json.
func configPath(appName string) (string, error) {
	home, err := os.UserHomeDir()
	if err != nil {
		return "", err
	}
	file := appName + ".json"
	return filepath.Join(home, ".ollama", "connections", file), nil
}
// SaveConnection persists the app-to-model association under
// ~/.ollama/connections, recording the time it was configured.
func SaveConnection(appName, model string) error {
	path, err := configPath(appName)
	if err != nil {
		return err
	}
	if err := os.MkdirAll(filepath.Dir(path), 0755); err != nil {
		return err
	}
	cfg := ConnectionConfig{
		App:          appName,
		Model:        model,
		ConfiguredAt: time.Now(),
	}
	data, err := json.MarshalIndent(cfg, "", " ")
	if err != nil {
		return err
	}
	return os.WriteFile(path, data, 0644)
}
// LoadConnection reads the saved connection for appName. The returned
// error satisfies os.IsNotExist when no connection has been saved.
func LoadConnection(appName string) (*ConnectionConfig, error) {
	path, err := configPath(appName)
	if err != nil {
		return nil, err
	}
	raw, err := os.ReadFile(path)
	if err != nil {
		return nil, err
	}
	config := new(ConnectionConfig)
	if err := json.Unmarshal(raw, config); err != nil {
		return nil, err
	}
	return config, nil
}
// ListConnections returns every saved connection found under
// ~/.ollama/connections, silently skipping unreadable or malformed
// files. A missing directory yields (nil, nil).
func ListConnections() ([]ConnectionConfig, error) {
	home, err := os.UserHomeDir()
	if err != nil {
		return nil, err
	}
	dir := filepath.Join(home, ".ollama", "connections")
	entries, err := os.ReadDir(dir)
	if err != nil {
		if os.IsNotExist(err) {
			return nil, nil
		}
		return nil, err
	}
	var configs []ConnectionConfig
	for _, entry := range entries {
		name := entry.Name()
		if entry.IsDir() || filepath.Ext(name) != ".json" {
			continue
		}
		raw, err := os.ReadFile(filepath.Join(dir, name))
		if err != nil {
			continue
		}
		var cfg ConnectionConfig
		if json.Unmarshal(raw, &cfg) != nil {
			continue
		}
		configs = append(configs, cfg)
	}
	return configs, nil
}

View File

@@ -37,8 +37,6 @@ func generateInteractive(cmd *cobra.Command, opts runOptions) error {
fmt.Fprintln(os.Stderr, " /load <model> Load a session or model")
fmt.Fprintln(os.Stderr, " /save <model> Save your current session")
fmt.Fprintln(os.Stderr, " /clear Clear session context")
fmt.Fprintln(os.Stderr, " /connect Configure an external app to use Ollama")
fmt.Fprintln(os.Stderr, " /launch [app] Launch a configured app")
fmt.Fprintln(os.Stderr, " /bye Exit")
fmt.Fprintln(os.Stderr, " /?, /help Help for a command")
fmt.Fprintln(os.Stderr, " /? shortcuts Help for keyboard shortcuts")
@@ -462,136 +460,6 @@ func generateInteractive(cmd *cobra.Command, opts runOptions) error {
}
case strings.HasPrefix(line, "/exit"), strings.HasPrefix(line, "/bye"):
return nil
case strings.HasPrefix(line, "/connect"):
args := strings.Fields(line)
var appName string
if len(args) > 1 {
appName = args[1]
} else {
var err error
appName, err = selectApp()
if err != nil {
fmt.Printf("error: %v\n", err)
continue
}
}
if _, ok := GetApp(appName); !ok {
fmt.Printf("Unknown app: %s\n", appName)
continue
}
modelName, err := selectModelForConnect(cmd.Context(), opts.Model)
if err != nil {
fmt.Printf("error: %v\n", err)
continue
}
if err := SaveConnection(appName, modelName); err != nil {
fmt.Printf("error: %v\n", err)
continue
}
fmt.Fprintf(os.Stderr, "Added %s to %s\n", modelName, appName)
if launch, _ := confirmLaunch(appName); launch {
if err := runInApp(appName, modelName); err != nil {
fmt.Printf("error: %v\n", err)
}
}
continue
case strings.HasPrefix(line, "/launch"):
args := strings.Fields(line)
var appName string
if len(args) >= 2 {
appName = args[1]
} else {
selected, err := selectConnectedApp()
if err != nil {
fmt.Printf("error: %v\n", err)
continue
}
if selected == "" {
// No connected apps, start connect flow
fmt.Fprintf(os.Stderr, "No apps configured. Let's set one up.\n\n")
appName, err = selectApp()
if err != nil {
fmt.Printf("error: %v\n", err)
continue
}
modelName, err := selectModelForConnect(cmd.Context(), opts.Model)
if err != nil {
fmt.Printf("error: %v\n", err)
continue
}
if err := SaveConnection(appName, modelName); err != nil {
fmt.Printf("error: %v\n", err)
continue
}
fmt.Fprintf(os.Stderr, "Added %s to %s\n", modelName, appName)
if err := runInApp(appName, modelName); err != nil {
fmt.Printf("error: %v\n", err)
}
continue
}
appName = selected
}
app, ok := GetApp(appName)
if !ok {
fmt.Printf("Unknown app: %s\n", appName)
continue
}
// Check app's own config first
modelName := getAppConfiguredModel(appName)
// Fall back to our saved connection config
if modelName == "" {
config, err := LoadConnection(appName)
if err != nil {
if os.IsNotExist(err) {
// No config, drop into connect flow
modelName, err = selectModelForConnect(cmd.Context(), opts.Model)
if err != nil {
fmt.Printf("error: %v\n", err)
continue
}
if err := SaveConnection(appName, modelName); err != nil {
fmt.Printf("error: %v\n", err)
continue
}
fmt.Fprintf(os.Stderr, "Added %s to %s\n", modelName, appName)
} else {
fmt.Printf("error: %v\n", err)
continue
}
} else {
modelName = config.Model
}
}
// If running model differs from configured, offer to switch
if opts.Model != "" && modelName != opts.Model {
if switchModel, _ := confirmPrompt(fmt.Sprintf("Switch %s to use %s?", app.DisplayName, opts.Model)); switchModel {
modelName = opts.Model
if err := SaveConnection(appName, modelName); err != nil {
fmt.Printf("error: %v\n", err)
continue
}
fmt.Fprintf(os.Stderr, "Updated %s to %s\n", appName, modelName)
}
}
if err := runInApp(app.Name, modelName); err != nil {
fmt.Printf("error: %v\n", err)
}
continue
case strings.HasPrefix(line, "/"):
args := strings.Fields(line)
isFile := false

View File

@@ -1,328 +0,0 @@
package cmd
import (
"context"
"errors"
"fmt"
"os"
"sort"
"strings"
"time"
"golang.org/x/term"
"github.com/ollama/ollama/api"
)
// maxDisplayedItems caps how many list rows are rendered at once;
// longer lists scroll.
const maxDisplayedItems = 10

// AppOrder fixes the presentation order of apps in the selector.
var AppOrder = []string{"claude", "opencode", "droid"}

// SelectItem is one entry in an interactive selection list.
type SelectItem struct {
	Name        string
	Description string
}
// Select renders an interactive, filterable list on stderr and returns
// the Name of the chosen item. The terminal is put into raw mode for
// the duration: typing filters the list, Up/Down arrows move the
// highlight, Enter confirms, and Escape or Ctrl+C cancels with an
// error. At most maxDisplayedItems rows are shown; longer lists scroll.
func Select(prompt string, items []SelectItem) (string, error) {
	if len(items) == 0 {
		return "", fmt.Errorf("no items to select from")
	}
	fd := int(os.Stdin.Fd())
	oldState, err := term.MakeRaw(fd)
	if err != nil {
		return "", err
	}
	defer term.Restore(fd, oldState)
	// Hide the cursor while the list is drawn; restore it on return.
	fmt.Fprint(os.Stderr, "\033[?25l")
	defer fmt.Fprint(os.Stderr, "\033[?25h")
	var filter string
	selected := 0
	scrollOffset := 0
	var lastLineCount int
	// render repaints the list in place: move the cursor up over the
	// previously drawn lines, clear to end of screen, then redraw.
	render := func() {
		filtered := filterItems(items, filter)
		if lastLineCount > 0 {
			fmt.Fprintf(os.Stderr, "\033[%dA", lastLineCount)
		}
		fmt.Fprint(os.Stderr, "\033[J")
		fmt.Fprintf(os.Stderr, "%s %s\r\n", prompt, filter)
		lineCount := 1
		if len(filtered) == 0 {
			fmt.Fprintf(os.Stderr, " \033[37m(no matches)\033[0m\r\n")
			lineCount++
		} else {
			displayCount := min(len(filtered), maxDisplayedItems)
			for i := 0; i < displayCount; i++ {
				idx := scrollOffset + i
				if idx >= len(filtered) {
					break
				}
				item := filtered[idx]
				if idx == selected {
					// Highlighted row: bold with a "+" marker.
					if item.Description != "" {
						fmt.Fprintf(os.Stderr, " \033[1m> %s\033[0m \033[37m- %s\033[0m\r\n", item.Name, item.Description)
					} else {
						fmt.Fprintf(os.Stderr, " \033[1m> %s\033[0m\r\n", item.Name)
					}
				} else {
					if item.Description != "" {
						fmt.Fprintf(os.Stderr, " %s \033[37m- %s\033[0m\r\n", item.Name, item.Description)
					} else {
						fmt.Fprintf(os.Stderr, " %s\r\n", item.Name)
					}
				}
				lineCount++
			}
			// Indicate how many entries are hidden below the window.
			if remaining := len(filtered) - scrollOffset - displayCount; remaining > 0 {
				fmt.Fprintf(os.Stderr, " \033[37m... and %d more\033[0m\r\n", remaining)
				lineCount++
			}
		}
		lastLineCount = lineCount
	}
	// clearUI erases the rendered list before returning to the caller.
	clearUI := func() {
		if lastLineCount > 0 {
			fmt.Fprintf(os.Stderr, "\033[%dA", lastLineCount)
			fmt.Fprint(os.Stderr, "\033[J")
		}
	}
	render()
	// Read up to 3 bytes at a time so arrow-key escape sequences
	// (ESC [ A/B) arrive in a single read.
	buf := make([]byte, 3)
	for {
		n, err := os.Stdin.Read(buf)
		if err != nil {
			return "", err
		}
		filtered := filterItems(items, filter)
		switch {
		case n == 1 && buf[0] == 13: // Enter
			if len(filtered) > 0 && selected < len(filtered) {
				clearUI()
				return filtered[selected].Name, nil
			}
		case n == 1 && (buf[0] == 3 || buf[0] == 27): // Ctrl+C or Escape
			clearUI()
			return "", fmt.Errorf("cancelled")
		case n == 1 && buf[0] == 127: // Backspace
			if len(filter) > 0 {
				filter = filter[:len(filter)-1]
				selected = 0
				scrollOffset = 0
			}
		case n == 3 && buf[0] == 27 && buf[1] == 91: // Arrow keys
			if buf[2] == 65 && selected > 0 { // Up
				selected--
				if selected < scrollOffset {
					scrollOffset = selected
				}
			} else if buf[2] == 66 && selected < len(filtered)-1 { // Down
				selected++
				if selected >= scrollOffset+maxDisplayedItems {
					scrollOffset = selected - maxDisplayedItems + 1
				}
			}
		case n == 1 && buf[0] >= 32 && buf[0] < 127: // Printable chars
			filter += string(buf[0])
			selected = 0
			scrollOffset = 0
		}
		render()
	}
}
// filterItems returns the items whose Name contains filter,
// case-insensitively. An empty filter matches everything.
func filterItems(items []SelectItem, filter string) []SelectItem {
	if filter == "" {
		return items
	}
	needle := strings.ToLower(filter)
	var matched []SelectItem
	for _, candidate := range items {
		if strings.Contains(strings.ToLower(candidate.Name), needle) {
			matched = append(matched, candidate)
		}
	}
	return matched
}
// selectApp prompts the user to choose one of the registered apps,
// presented in AppOrder.
func selectApp() (string, error) {
	items := make([]SelectItem, 0, len(AppOrder))
	for _, key := range AppOrder {
		app, ok := AppRegistry[key]
		if !ok {
			continue
		}
		items = append(items, SelectItem{Name: app.Name, Description: app.DisplayName})
	}
	if len(items) == 0 {
		return "", fmt.Errorf("no apps available")
	}
	return Select("Select app:", items)
}
// selectConnectedApp prompts the user to pick one previously connected
// app. It returns "" with a nil error when nothing has been connected
// yet (or no saved connection maps to a known app).
func selectConnectedApp() (string, error) {
	connections, err := ListConnections()
	if err != nil {
		return "", err
	}
	if len(connections) == 0 {
		return "", nil
	}
	var items []SelectItem
	for _, conn := range connections {
		app, known := GetApp(conn.App)
		if !known {
			continue
		}
		desc := fmt.Sprintf("%s (%s)", app.DisplayName, conn.Model)
		items = append(items, SelectItem{Name: app.Name, Description: desc})
	}
	if len(items) == 0 {
		return "", nil
	}
	return Select("Select app to launch:", items)
}
// confirmLaunch asks the user whether to launch the named app now.
func confirmLaunch(appName string) (bool, error) {
	question := fmt.Sprintf("Launch %s now?", appName)
	return confirmPrompt(question)
}
// confirmPrompt switches the terminal to raw mode and asks a yes/no
// question on stderr, echoing "yes" or "no" once a decision key is
// pressed. Accepted keys: y/Y/Enter => yes; n/N/Escape/Ctrl+C => no.
// Any other key is ignored and the prompt keeps waiting.
func confirmPrompt(prompt string) (bool, error) {
	fd := int(os.Stdin.Fd())
	prev, err := term.MakeRaw(fd)
	if err != nil {
		return false, err
	}
	defer term.Restore(fd, prev)

	fmt.Fprintf(os.Stderr, "%s [y/n] ", prompt)
	key := make([]byte, 1)
	for {
		if _, err := os.Stdin.Read(key); err != nil {
			return false, err
		}
		switch key[0] {
		case 'y', 'Y', 13: // 13 = Enter
			fmt.Fprintf(os.Stderr, "yes\r\n")
			return true, nil
		case 'n', 'N', 27, 3: // 27 = Escape, 3 = Ctrl+C
			fmt.Fprintf(os.Stderr, "no\r\n")
			return false, nil
		}
	}
}
// selectModelForConnect lists the locally available models and prompts
// the user to pick one. When currentModel is non-empty and present, it
// is moved to the top of the (otherwise case-insensitively sorted)
// list. If the chosen model has a remote counterpart (a cloud model),
// the user is taken through sign-in before the name is returned.
func selectModelForConnect(ctx context.Context, currentModel string) (string, error) {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return "", err
	}
	resp, err := client.List(ctx)
	if err != nil {
		return "", err
	}
	if len(resp.Models) == 0 {
		return "", fmt.Errorf("no models available. Run 'ollama pull <model>' first")
	}

	cloud := make(map[string]bool)
	items := make([]SelectItem, 0, len(resp.Models))
	for _, m := range resp.Models {
		items = append(items, SelectItem{Name: m.Name})
		if m.RemoteModel != "" {
			cloud[m.Name] = true
		}
	}

	// Case-insensitive alphabetical ordering.
	sort.Slice(items, func(i, j int) bool {
		return strings.ToLower(items[i].Name) < strings.ToLower(items[j].Name)
	})

	// Surface the model currently in use at the top of the list.
	if currentModel != "" {
		for i := range items {
			if items[i].Name != currentModel {
				continue
			}
			current := items[i]
			items = append(items[:i], items[i+1:]...)
			items = append([]SelectItem{current}, items...)
			break
		}
	}

	chosen, err := Select("Select model:", items)
	if err != nil {
		return "", err
	}
	if cloud[chosen] {
		if err := ensureSignedIn(ctx, client); err != nil {
			return "", err
		}
	}
	return chosen, nil
}
// ensureSignedIn verifies the user is signed in to ollama.com and, if
// not, walks them through the browser sign-in flow, polling Whoami
// every two seconds until the sign-in completes or ctx is cancelled.
func ensureSignedIn(ctx context.Context, client *api.Client) error {
	user, err := client.Whoami(ctx)
	if err == nil && user != nil && user.Name != "" {
		return nil // already signed in
	}

	// Only an authorization error that carries a sign-in URL is
	// recoverable here; anything else is passed through to the caller.
	var authErr api.AuthorizationError
	if !errors.As(err, &authErr) || authErr.SigninURL == "" {
		return err
	}

	ok, err := confirmPrompt("Sign in to ollama.com?")
	if err != nil || !ok {
		return fmt.Errorf("sign in required for cloud models")
	}

	fmt.Fprintf(os.Stderr, "\nTo sign in, navigate to:\n %s\n\n", authErr.SigninURL)
	fmt.Fprintf(os.Stderr, "\033[90mwaiting for sign in to complete...\033[0m")

	ticker := time.NewTicker(2 * time.Second)
	defer ticker.Stop()
	for {
		select {
		case <-ctx.Done():
			fmt.Fprintf(os.Stderr, "\n")
			return ctx.Err()
		case <-ticker.C:
			// Poll until the account shows up, printing a dot per attempt.
			if user, err := client.Whoami(ctx); err == nil && user != nil && user.Name != "" {
				fmt.Fprintf(os.Stderr, "\r\033[K\033[A\r\033[K\033[1msigned in:\033[0m %s\n", user.Name)
				return nil
			}
			fmt.Fprintf(os.Stderr, ".")
		}
	}
}

View File

@@ -73,7 +73,7 @@ _build_darwin() {
MLX_CGO_CFLAGS="-O3 -I$(pwd)/$BUILD_DIR/_deps/mlx-c-src -mmacosx-version-min=14.0"
MLX_CGO_LDFLAGS="-L$(pwd)/$BUILD_DIR/lib/ollama -lmlxc -lmlx -Wl,-rpath,@executable_path -lc++ -framework Metal -framework Foundation -framework Accelerate -mmacosx-version-min=14.0"
fi
GOOS=darwin GOARCH=$ARCH CGO_ENABLED=1 CGO_CFLAGS="$MLX_CGO_CFLAGS" CGO_LDFLAGS="$MLX_CGO_LDFLAGS" go build -tags mlx -o $INSTALL_PREFIX/imagegen ./x/imagegen/cmd/engine
GOOS=darwin GOARCH=$ARCH CGO_ENABLED=1 CGO_CFLAGS="$MLX_CGO_CFLAGS" CGO_LDFLAGS="$MLX_CGO_LDFLAGS" go build -tags mlx -o $INSTALL_PREFIX/ollama-mlx .
GOOS=darwin GOARCH=$ARCH CGO_ENABLED=1 go build -o $INSTALL_PREFIX .
done
}
@@ -82,19 +82,19 @@ _sign_darwin() {
status "Creating universal binary..."
mkdir -p dist/darwin
lipo -create -output dist/darwin/ollama dist/darwin-*/ollama
lipo -create -output dist/darwin/imagegen dist/darwin-*/imagegen
lipo -create -output dist/darwin/ollama-mlx dist/darwin-*/ollama-mlx
chmod +x dist/darwin/ollama
chmod +x dist/darwin/imagegen
chmod +x dist/darwin/ollama-mlx
if [ -n "$APPLE_IDENTITY" ]; then
for F in dist/darwin/ollama dist/darwin-*/lib/ollama/* dist/darwin/imagegen; do
for F in dist/darwin/ollama dist/darwin-*/lib/ollama/* dist/darwin/ollama-mlx; do
codesign -f --timestamp -s "$APPLE_IDENTITY" --identifier ai.ollama.ollama --options=runtime $F
done
# create a temporary zip for notarization
TEMP=$(mktemp -u).zip
ditto -c -k --keepParent dist/darwin/ollama "$TEMP"
xcrun notarytool submit "$TEMP" --wait --timeout 10m --apple-id $APPLE_ID --password $APPLE_PASSWORD --team-id $APPLE_TEAM_ID
xcrun notarytool submit "$TEMP" --wait --timeout 20m --apple-id $APPLE_ID --password $APPLE_PASSWORD --team-id $APPLE_TEAM_ID
rm -f "$TEMP"
fi
@@ -154,23 +154,25 @@ _build_macapp() {
mkdir -p dist/Ollama.app/Contents/Resources
if [ -d dist/darwin-amd64 ]; then
lipo -create -output dist/Ollama.app/Contents/Resources/ollama dist/darwin-amd64/ollama dist/darwin-arm64/ollama
lipo -create -output dist/Ollama.app/Contents/Resources/imagegen dist/darwin-amd64/imagegen dist/darwin-arm64/imagegen
lipo -create -output dist/Ollama.app/Contents/Resources/ollama-mlx dist/darwin-amd64/ollama-mlx dist/darwin-arm64/ollama-mlx
for F in dist/darwin-amd64/lib/ollama/*mlx*.dylib ; do
lipo -create -output dist/darwin/$(basename $F) $F dist/darwin-arm64/lib/ollama/$(basename $F)
done
cp dist/darwin-*/lib/ollama/*.so dist/darwin-*/lib/ollama/*.dylib dist/Ollama.app/Contents/Resources/
cp dist/darwin/*.dylib dist/Ollama.app/Contents/Resources/
# Copy MLX metallib (architecture-independent, just use arm64 version)
cp dist/darwin-arm64/lib/ollama/*.metallib dist/Ollama.app/Contents/Resources/ 2>/dev/null || true
else
cp -a dist/darwin/ollama dist/Ollama.app/Contents/Resources/ollama
cp dist/darwin/*.so dist/darwin/*.dylib dist/Ollama.app/Contents/Resources/
fi
cp -a dist/darwin/imagegen dist/Ollama.app/Contents/Resources/imagegen
cp -a dist/darwin/ollama-mlx dist/Ollama.app/Contents/Resources/ollama-mlx
chmod a+x dist/Ollama.app/Contents/Resources/ollama
# Sign
if [ -n "$APPLE_IDENTITY" ]; then
codesign -f --timestamp -s "$APPLE_IDENTITY" --identifier ai.ollama.ollama --options=runtime dist/Ollama.app/Contents/Resources/ollama
for lib in dist/Ollama.app/Contents/Resources/*.so dist/Ollama.app/Contents/Resources/*.dylib dist/Ollama.app/Contents/Resources/imagegen ; do
for lib in dist/Ollama.app/Contents/Resources/*.so dist/Ollama.app/Contents/Resources/*.dylib dist/Ollama.app/Contents/Resources/*.metallib dist/Ollama.app/Contents/Resources/ollama-mlx ; do
codesign -f --timestamp -s "$APPLE_IDENTITY" --identifier ai.ollama.ollama --options=runtime ${lib}
done
codesign -f --timestamp -s "$APPLE_IDENTITY" --identifier com.electron.ollama --deep --options=runtime dist/Ollama.app
@@ -178,11 +180,11 @@ _build_macapp() {
rm -f dist/Ollama-darwin.zip
ditto -c -k --keepParent dist/Ollama.app dist/Ollama-darwin.zip
(cd dist/Ollama.app/Contents/Resources/; tar -cf - ollama imagegen *.so *.dylib) | gzip -9vc > dist/ollama-darwin.tgz
(cd dist/Ollama.app/Contents/Resources/; tar -cf - ollama ollama-mlx *.so *.dylib *.metallib 2>/dev/null) | gzip -9vc > dist/ollama-darwin.tgz
# Notarize and Staple
if [ -n "$APPLE_IDENTITY" ]; then
$(xcrun -f notarytool) submit dist/Ollama-darwin.zip --wait --timeout 10m --apple-id "$APPLE_ID" --password "$APPLE_PASSWORD" --team-id "$APPLE_TEAM_ID"
$(xcrun -f notarytool) submit dist/Ollama-darwin.zip --wait --timeout 20m --apple-id "$APPLE_ID" --password "$APPLE_PASSWORD" --team-id "$APPLE_TEAM_ID"
rm -f dist/Ollama-darwin.zip
$(xcrun -f stapler) staple dist/Ollama.app
ditto -c -k --keepParent dist/Ollama.app dist/Ollama-darwin.zip
@@ -206,7 +208,7 @@ _build_macapp() {
rm -f dist/rw*.dmg
codesign -f --timestamp -s "$APPLE_IDENTITY" --identifier ai.ollama.ollama --options=runtime dist/Ollama.dmg
$(xcrun -f notarytool) submit dist/Ollama.dmg --wait --timeout 10m --apple-id "$APPLE_ID" --password "$APPLE_PASSWORD" --team-id "$APPLE_TEAM_ID"
$(xcrun -f notarytool) submit dist/Ollama.dmg --wait --timeout 20m --apple-id "$APPLE_ID" --password "$APPLE_PASSWORD" --team-id "$APPLE_TEAM_ID"
$(xcrun -f stapler) staple dist/Ollama.dmg
else
echo "WARNING: Code signing disabled, this bundle will not work for upgrade testing"

View File

@@ -48,53 +48,12 @@ if echo $PLATFORM | grep "amd64" > /dev/null; then
.
fi
# Deduplicate CUDA libraries across mlx_* and cuda_* directories
deduplicate_cuda_libs() {
local base_dir="$1"
echo "Deduplicating CUDA libraries in ${base_dir}..."
# Find all mlx_cuda_* directories
for mlx_dir in "${base_dir}"/lib/ollama/mlx_cuda_*; do
[ -d "${mlx_dir}" ] || continue
# Extract CUDA version (e.g., v12, v13)
cuda_version=$(basename "${mlx_dir}" | sed 's/mlx_cuda_//')
cuda_dir="${base_dir}/lib/ollama/cuda_${cuda_version}"
# Skip if corresponding cuda_* directory doesn't exist
[ -d "${cuda_dir}" ] || continue
echo " Checking ${mlx_dir} against ${cuda_dir}..."
# Find all .so* files in mlx directory
find "${mlx_dir}" -type f -name "*.so*" | while read mlx_file; do
filename=$(basename "${mlx_file}")
cuda_file="${cuda_dir}/${filename}"
# Skip if file doesn't exist in cuda directory
[ -f "${cuda_file}" ] || continue
# Compare checksums
mlx_sum=$(sha256sum "${mlx_file}" | awk '{print $1}')
cuda_sum=$(sha256sum "${cuda_file}" | awk '{print $1}')
if [ "${mlx_sum}" = "${cuda_sum}" ]; then
echo " Deduplicating ${filename}"
# Calculate relative path from mlx_dir to cuda_dir
rel_path="../cuda_${cuda_version}/${filename}"
rm -f "${mlx_file}"
ln -s "${rel_path}" "${mlx_file}"
fi
done
done
}
# Run deduplication for each platform output directory
if echo $PLATFORM | grep "," > /dev/null ; then
deduplicate_cuda_libs "./dist/linux_amd64"
deduplicate_cuda_libs "./dist/linux_arm64"
$(dirname $0)/deduplicate_cuda_libs.sh "./dist/linux_amd64"
$(dirname $0)/deduplicate_cuda_libs.sh "./dist/linux_arm64"
elif echo $PLATFORM | grep "amd64\|arm64" > /dev/null ; then
deduplicate_cuda_libs "./dist"
$(dirname $0)/deduplicate_cuda_libs.sh "./dist"
fi
# buildx behavior changes for single vs. multiplatform

View File

@@ -0,0 +1,60 @@
#!/bin/sh
#
# Deduplicate CUDA libraries across mlx_* and cuda_* directories
# This script finds identical .so* files in mlx_cuda_* directories that exist
# in corresponding cuda_* directories and replaces them with symlinks.
#
set -eu
if [ $# -eq 0 ]; then
echo "ERROR: No directory specified" >&2
echo "Usage: $0 <base_directory>" >&2
exit 1
fi
base_dir="$1"
if [ ! -d "${base_dir}" ]; then
echo "ERROR: Directory ${base_dir} does not exist" >&2
exit 1
fi
echo "Deduplicating CUDA libraries in ${base_dir}..."
# Find all mlx_cuda_* directories
for mlx_dir in "${base_dir}"/lib/ollama/mlx_cuda_*; do
[ -d "${mlx_dir}" ] || continue
# Extract CUDA version (e.g., v12, v13)
cuda_version=$(basename "${mlx_dir}" | sed 's/mlx_cuda_//')
cuda_dir="${base_dir}/lib/ollama/cuda_${cuda_version}"
# Skip if corresponding cuda_* directory doesn't exist
[ -d "${cuda_dir}" ] || continue
echo " Checking ${mlx_dir} against ${cuda_dir}..."
# Find all .so* files in mlx directory
find "${mlx_dir}" -type f -name "*.so*" | while read mlx_file; do
filename=$(basename "${mlx_file}")
cuda_file="${cuda_dir}/${filename}"
# Skip if file doesn't exist in cuda directory
[ -f "${cuda_file}" ] || continue
# Compare checksums
mlx_sum=$(sha256sum "${mlx_file}" | awk '{print $1}')
cuda_sum=$(sha256sum "${cuda_file}" | awk '{print $1}')
if [ "${mlx_sum}" = "${cuda_sum}" ]; then
echo " Deduplicating ${filename}"
# Calculate relative path from mlx_dir to cuda_dir
rel_path="../cuda_${cuda_version}/${filename}"
rm -f "${mlx_file}"
ln -s "${rel_path}" "${mlx_file}"
fi
done
done
echo "Deduplication complete"

View File

@@ -95,11 +95,48 @@ func (p *blobDownloadPart) UnmarshalJSON(b []byte) error {
}
const (
numDownloadParts = 16
// numDownloadParts is the default number of concurrent download parts for standard downloads
numDownloadParts = 16
// numHFDownloadParts is the reduced number of concurrent download parts for HuggingFace
// downloads to avoid triggering rate limits (HTTP 429 errors). See GitHub issue #13297.
numHFDownloadParts = 4
minDownloadPartSize int64 = 100 * format.MegaByte
maxDownloadPartSize int64 = 1000 * format.MegaByte
)
// isHuggingFaceURL returns true if the URL is from a HuggingFace domain.
// This includes:
// - huggingface.co (main domain)
// - *.huggingface.co (subdomains like cdn-lfs.huggingface.co)
// - hf.co (shortlink domain)
// - *.hf.co (CDN domains like cdn-lfs.hf.co, cdn-lfs3.hf.co)
func isHuggingFaceURL(u *url.URL) bool {
if u == nil {
return false
}
host := strings.ToLower(u.Hostname())
return host == "huggingface.co" ||
strings.HasSuffix(host, ".huggingface.co") ||
host == "hf.co" ||
strings.HasSuffix(host, ".hf.co")
}
// getNumDownloadParts returns the download-part concurrency to use for
// the given URL. HuggingFace hosts get a reduced default
// (numHFDownloadParts) to avoid tripping their rate limits; that value
// can be overridden with any positive integer in the
// OLLAMA_HF_CONCURRENCY environment variable. Every other host uses
// the standard numDownloadParts.
func getNumDownloadParts(u *url.URL) int {
	if !isHuggingFaceURL(u) {
		return numDownloadParts
	}
	// An unset, non-numeric, zero, or negative override falls back to
	// the HuggingFace default.
	if n, err := strconv.Atoi(os.Getenv("OLLAMA_HF_CONCURRENCY")); err == nil && n > 0 {
		return n
	}
	return numHFDownloadParts
}
func (p *blobDownloadPart) Name() string {
return strings.Join([]string{
p.blobDownload.Name, "partial", strconv.Itoa(p.N),
@@ -271,7 +308,11 @@ func (b *blobDownload) run(ctx context.Context, requestURL *url.URL, opts *regis
}
g, inner := errgroup.WithContext(ctx)
g.SetLimit(numDownloadParts)
concurrency := getNumDownloadParts(directURL)
if concurrency != numDownloadParts {
slog.Info(fmt.Sprintf("using reduced concurrency (%d) for HuggingFace download", concurrency))
}
g.SetLimit(concurrency)
for i := range b.Parts {
part := b.Parts[i]
if part.Completed.Load() == part.Size {

194
server/download_test.go Normal file
View File

@@ -0,0 +1,194 @@
package server
import (
"net/url"
"testing"
"github.com/stretchr/testify/assert"
)
// TestIsHuggingFaceURL exercises isHuggingFaceURL across exact-domain,
// subdomain, case-insensitivity, look-alike-domain, path-only, and nil
// inputs.
func TestIsHuggingFaceURL(t *testing.T) {
	tests := []struct {
		name     string
		url      string
		expected bool
	}{
		{
			name:     "nil url",
			url:      "",
			expected: false,
		},
		{
			name:     "huggingface.co main domain",
			url:      "https://huggingface.co/some/model",
			expected: true,
		},
		{
			name:     "cdn-lfs.huggingface.co subdomain",
			url:      "https://cdn-lfs.huggingface.co/repos/abc/123",
			expected: true,
		},
		{
			name:     "cdn-lfs3.hf.co CDN domain",
			url:      "https://cdn-lfs3.hf.co/repos/abc/123",
			expected: true,
		},
		{
			name:     "hf.co shortlink domain",
			url:      "https://hf.co/model",
			expected: true,
		},
		{
			name:     "uppercase HuggingFace domain",
			url:      "https://HUGGINGFACE.CO/model",
			expected: true,
		},
		{
			name:     "mixed case HF domain",
			url:      "https://Cdn-Lfs.HF.Co/repos",
			expected: true,
		},
		{
			name:     "ollama registry",
			url:      "https://registry.ollama.ai/v2/library/llama3",
			expected: false,
		},
		{
			name:     "github.com",
			url:      "https://github.com/ollama/ollama",
			expected: false,
		},
		{
			name:     "fake huggingface domain",
			url:      "https://nothuggingface.co/model",
			expected: false,
		},
		{
			name:     "fake hf domain",
			url:      "https://nothf.co/model",
			expected: false,
		},
		{
			name:     "huggingface in path not host",
			url:      "https://example.com/huggingface.co/model",
			expected: false,
		},
	}
	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			// An empty tc.url leaves u nil, covering the nil-guard path.
			var u *url.URL
			if tc.url != "" {
				var err error
				u, err = url.Parse(tc.url)
				if err != nil {
					t.Fatalf("failed to parse URL: %v", err)
				}
			}
			got := isHuggingFaceURL(u)
			assert.Equal(t, tc.expected, got)
		})
	}
}
// TestGetNumDownloadParts exercises getNumDownloadParts for standard
// hosts, HuggingFace hosts, and every OLLAMA_HF_CONCURRENCY override
// case (valid, non-numeric, zero, negative, ignored for non-HF URLs).
func TestGetNumDownloadParts(t *testing.T) {
	tests := []struct {
		name        string
		url         string
		envValue    string
		expected    int
		description string
	}{
		{
			name:        "nil url returns default",
			url:         "",
			envValue:    "",
			expected:    numDownloadParts,
			description: "nil URL should return standard concurrency",
		},
		{
			name:        "ollama registry returns default",
			url:         "https://registry.ollama.ai/v2/library/llama3",
			envValue:    "",
			expected:    numDownloadParts,
			description: "Ollama registry should use standard concurrency",
		},
		{
			name:        "huggingface returns reduced default",
			url:         "https://huggingface.co/model/repo",
			envValue:    "",
			expected:    numHFDownloadParts,
			description: "HuggingFace should use reduced concurrency",
		},
		{
			name:        "hf.co CDN returns reduced default",
			url:         "https://cdn-lfs3.hf.co/repos/abc/123",
			envValue:    "",
			expected:    numHFDownloadParts,
			description: "HuggingFace CDN should use reduced concurrency",
		},
		{
			name:        "huggingface with env override",
			url:         "https://huggingface.co/model/repo",
			envValue:    "2",
			expected:    2,
			description: "OLLAMA_HF_CONCURRENCY should override default",
		},
		{
			name:        "huggingface with higher env override",
			url:         "https://huggingface.co/model/repo",
			envValue:    "8",
			expected:    8,
			description: "OLLAMA_HF_CONCURRENCY can be set higher than default",
		},
		{
			name:        "huggingface with invalid env (non-numeric)",
			url:         "https://huggingface.co/model/repo",
			envValue:    "invalid",
			expected:    numHFDownloadParts,
			description: "Invalid OLLAMA_HF_CONCURRENCY should fall back to default",
		},
		{
			name:        "huggingface with invalid env (zero)",
			url:         "https://huggingface.co/model/repo",
			envValue:    "0",
			expected:    numHFDownloadParts,
			description: "Zero OLLAMA_HF_CONCURRENCY should fall back to default",
		},
		{
			name:        "huggingface with invalid env (negative)",
			url:         "https://huggingface.co/model/repo",
			envValue:    "-1",
			expected:    numHFDownloadParts,
			description: "Negative OLLAMA_HF_CONCURRENCY should fall back to default",
		},
		{
			name:        "non-huggingface ignores env",
			url:         "https://registry.ollama.ai/v2/library/llama3",
			envValue:    "2",
			expected:    numDownloadParts,
			description: "OLLAMA_HF_CONCURRENCY should not affect non-HF URLs",
		},
	}
	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			// Always pin the variable so the test is hermetic even when
			// OLLAMA_HF_CONCURRENCY is set in the ambient environment; an
			// empty value reads as unset. t.Setenv restores the previous
			// value when the subtest ends.
			t.Setenv("OLLAMA_HF_CONCURRENCY", tc.envValue)
			// An empty tc.url leaves u nil, covering the nil-guard path.
			var u *url.URL
			if tc.url != "" {
				var err error
				u, err = url.Parse(tc.url)
				if err != nil {
					t.Fatalf("failed to parse URL: %v", err)
				}
			}
			got := getNumDownloadParts(u)
			assert.Equal(t, tc.expected, got, tc.description)
		})
	}
}

View File

@@ -1,24 +1,50 @@
# Experimental Features
# Experimental Features
## MLX Backend
We're working on a new experimental backend based on the [MLX project](https://github.com/ml-explore/mlx)
Support is currently limited to MacOS and Linux with CUDA GPUs. We're looking to add support for Windows CUDA soon, and other GPU vendors. To build:
Support is currently limited to MacOS and Linux with CUDA GPUs. We're looking to add support for Windows CUDA soon, and other GPU vendors.
```
### Building ollama-mlx
The `ollama-mlx` binary is a separate build of Ollama with MLX support enabled. This enables experimental features like image generation.
#### macOS (Apple Silicon and Intel)
```bash
# Build MLX backend libraries
cmake --preset MLX
cmake --build --preset MLX --parallel
cmake --install build --component MLX
go build -tags mlx .
# Build ollama-mlx binary
go build -tags mlx -o ollama-mlx .
```
On linux, use the preset "MLX CUDA 13" or "MLX CUDA 12" to enable CUDA with the default Ollama NVIDIA GPU architectures enabled.
#### Linux (CUDA)
On Linux, use the preset "MLX CUDA 13" or "MLX CUDA 12" to enable CUDA with the default Ollama NVIDIA GPU architectures enabled:
```bash
# Build MLX backend libraries with CUDA support
cmake --preset 'MLX CUDA 13'
cmake --build --preset 'MLX CUDA 13' --parallel
cmake --install build --component MLX
# Build ollama-mlx binary
CGO_CFLAGS="-O3 -I$(pwd)/build/_deps/mlx-c-src" \
CGO_LDFLAGS="-L$(pwd)/build/lib/ollama -lmlxc -lmlx" \
go build -tags mlx -o ollama-mlx .
```
#### Using build scripts
The build scripts automatically create the `ollama-mlx` binary:
- **macOS**: `./scripts/build_darwin.sh` produces `dist/darwin/ollama-mlx`
- **Linux**: `./scripts/build_linux.sh` produces `ollama-mlx` in the output archives
## Image Generation
Based on the experimental MLX backend, we're working on adding imagegen support. After running the cmake commands above:
```
go build -o imagegen ./x/imagegen/cmd/engine
```
Image generation is built into the `ollama-mlx` binary. Run `ollama-mlx serve` to start the server with image generation support enabled.

View File

@@ -123,11 +123,6 @@ func RegisterFlags(cmd *cobra.Command) {
// Returns true if it handled the request, false if the caller should continue with normal flow.
// Supports flags: --width, --height, --steps, --seed, --negative
func RunCLI(cmd *cobra.Command, name string, prompt string, interactive bool, keepAlive *api.Duration) error {
// Verify it's a valid image gen model
if ResolveModelName(name) == "" {
return fmt.Errorf("unknown image generation model: %s", name)
}
// Get options from flags (with env var defaults)
opts := DefaultOptions()
if cmd != nil && cmd.Flags() != nil {
@@ -511,10 +506,7 @@ func displayImageInTerminal(imagePath string) bool {
// Send in chunks for large images
const chunkSize = 4096
for i := 0; i < len(encoded); i += chunkSize {
end := i + chunkSize
if end > len(encoded) {
end = len(encoded)
}
end := min(i+chunkSize, len(encoded))
chunk := encoded[i:end]
if i == 0 {

View File

@@ -14,7 +14,9 @@ import (
"os"
"os/exec"
"path/filepath"
"runtime"
"strconv"
"strings"
"sync"
"time"
@@ -70,7 +72,7 @@ func NewServer(modelName string) (*Server, error) {
port = rand.Intn(65535-49152) + 49152
}
// Get the ollama executable path
// Get the ollama-mlx executable path (in same directory as current executable)
exe, err := os.Executable()
if err != nil {
return nil, fmt.Errorf("unable to lookup executable path: %w", err)
@@ -78,11 +80,42 @@ func NewServer(modelName string) (*Server, error) {
if eval, err := filepath.EvalSymlinks(exe); err == nil {
exe = eval
}
mlxExe := filepath.Join(filepath.Dir(exe), "ollama-mlx")
// Spawn subprocess: ollama runner --image-engine --model <path> --port <port>
cmd := exec.Command(exe, "runner", "--image-engine", "--model", modelName, "--port", strconv.Itoa(port))
// Spawn subprocess: ollama-mlx runner --image-engine --model <path> --port <port>
cmd := exec.Command(mlxExe, "runner", "--image-engine", "--model", modelName, "--port", strconv.Itoa(port))
cmd.Env = os.Environ()
// On Linux, set LD_LIBRARY_PATH to include MLX library directories
if runtime.GOOS == "linux" {
// Build library paths: start with LibOllamaPath, then add any mlx_* subdirectories
libraryPaths := []string{ml.LibOllamaPath}
if mlxDirs, err := filepath.Glob(filepath.Join(ml.LibOllamaPath, "mlx_*")); err == nil {
libraryPaths = append(libraryPaths, mlxDirs...)
}
// Append existing LD_LIBRARY_PATH if set
if existingPath, ok := os.LookupEnv("LD_LIBRARY_PATH"); ok {
libraryPaths = append(libraryPaths, filepath.SplitList(existingPath)...)
}
pathEnvVal := strings.Join(libraryPaths, string(filepath.ListSeparator))
// Update or add LD_LIBRARY_PATH in cmd.Env
found := false
for i := range cmd.Env {
if strings.HasPrefix(cmd.Env[i], "LD_LIBRARY_PATH=") {
cmd.Env[i] = "LD_LIBRARY_PATH=" + pathEnvVal
found = true
break
}
}
if !found {
cmd.Env = append(cmd.Env, "LD_LIBRARY_PATH="+pathEnvVal)
}
slog.Debug("mlx subprocess library path", "LD_LIBRARY_PATH", pathEnvVal)
}
s := &Server{
cmd: cmd,
port: port,
@@ -113,7 +146,7 @@ func NewServer(modelName string) (*Server, error) {
}
}()
slog.Info("starting image runner subprocess", "model", modelName, "port", port)
slog.Info("starting ollama-mlx image runner subprocess", "exe", mlxExe, "model", modelName, "port", port)
if err := cmd.Start(); err != nil {
return nil, fmt.Errorf("failed to start image runner: %w", err)
}