Compare commits

1 commit

Author: Patrick Devine
Commit: db961934dc add modelpaths (2023-07-17 17:32:43 -07:00)

1560 changed files with 59,422 additions and 1,206,797 deletions


@@ -1,11 +1,7 @@
build
llama/build
.venv
.vscode
ollama
app
macapp
dist
build
.env
.cache
test_data
.git
web

.gitattributes (vendored)

@@ -1,28 +0,0 @@
llama/**/*.cpp linguist-vendored
llama/**/*.hpp linguist-vendored
llama/**/*.h linguist-vendored
llama/**/*.c linguist-vendored
llama/**/*.cu linguist-vendored
llama/**/*.cuh linguist-vendored
llama/**/*.m linguist-vendored
llama/**/*.metal linguist-vendored
ml/backend/**/*.c linguist-vendored
ml/backend/**/*.h linguist-vendored
ml/backend/**/*.cpp linguist-vendored
ml/backend/**/*.hpp linguist-vendored
ml/backend/**/*.cu linguist-vendored
ml/backend/**/*.cuh linguist-vendored
ml/backend/**/*.m linguist-vendored
ml/backend/**/*.metal linguist-vendored
ml/backend/**/*.comp linguist-vendored
ml/backend/**/*.glsl linguist-vendored
ml/backend/**/CMakeLists.txt linguist-vendored
app/webview linguist-vendored
llama/build-info.cpp linguist-generated
ml/backend/ggml/ggml/src/ggml-metal/ggml-metal-embed.s linguist-generated
* text=auto
*.go text eol=lf


@@ -1,68 +0,0 @@
name: Bug report
labels: [bug]
description: Something isn't working right.
body:
- type: textarea
id: description
attributes:
label: What is the issue?
description: What happened? What did you expect to happen?
validations:
required: true
- type: textarea
id: logs
attributes:
label: Relevant log output
description: Please copy and paste any relevant log output. See [Troubleshooting Guide](https://github.com/ollama/ollama/blob/main/docs/troubleshooting.md#how-to-troubleshoot-issues) for details.
render: shell
validations:
required: false
- type: dropdown
id: os
attributes:
label: OS
description: Which operating system are you using?
multiple: true
options:
- Linux
- macOS
- Windows
- Docker
- WSL2
validations:
required: false
- type: dropdown
id: gpu
attributes:
label: GPU
description: Which GPU are you using?
multiple: true
options:
- Nvidia
- AMD
- Intel
- Apple
- Other
validations:
required: false
- type: dropdown
id: cpu
attributes:
label: CPU
description: Which CPU are you using?
multiple: true
options:
- Intel
- AMD
- Apple
- Other
validations:
required: false
- type: input
id: version
attributes:
label: Ollama version
description: What version of Ollama are you using? (`ollama --version`)
placeholder: e.g., 0.1.32
validations:
required: false


@@ -1,6 +0,0 @@
---
name: Feature request
about: Request a new feature
labels: feature request
---


@@ -1,5 +0,0 @@
---
name: Model request
about: Request support for a new model to be added to Ollama
labels: model request
---


@@ -1,8 +0,0 @@
blank_issues_enabled: true
contact_links:
- name: Help
url: https://discord.com/invite/ollama
about: Please join our Discord server for help using Ollama
- name: Troubleshooting
url: https://github.com/ollama/ollama/blob/main/docs/faq.md#faq
about: See the FAQ for common issues and solutions


@@ -1,24 +0,0 @@
name: latest
on:
release:
types: [released]
jobs:
update-latest:
environment: release
runs-on: linux
steps:
- uses: actions/checkout@v4
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ vars.DOCKER_USER }}
password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
- name: Tag images as latest
env:
PUSH: "1"
shell: bash
run: |
export "VERSION=${GITHUB_REF_NAME#v}"
./scripts/tag_latest.sh


@@ -1,544 +0,0 @@
name: release
on:
push:
tags:
- 'v*'
env:
CGO_CFLAGS: '-O3'
CGO_CXXFLAGS: '-O3'
jobs:
setup-environment:
runs-on: ubuntu-latest
environment: release
outputs:
GOFLAGS: ${{ steps.goflags.outputs.GOFLAGS }}
VERSION: ${{ steps.goflags.outputs.VERSION }}
vendorsha: ${{ steps.changes.outputs.vendorsha }}
steps:
- uses: actions/checkout@v4
- name: Set environment
id: goflags
run: |
echo GOFLAGS="'-ldflags=-w -s \"-X=github.com/ollama/ollama/version.Version=${GITHUB_REF_NAME#v}\" \"-X=github.com/ollama/ollama/server.mode=release\"'" | tee -a $GITHUB_OUTPUT
echo VERSION="${GITHUB_REF_NAME#v}" | tee -a $GITHUB_OUTPUT
echo vendorsha=$(make -f Makefile.sync print-base) | tee -a $GITHUB_OUTPUT
darwin-build:
runs-on: macos-14-xlarge
environment: release
needs: setup-environment
env:
GOFLAGS: ${{ needs.setup-environment.outputs.GOFLAGS }}
VERSION: ${{ needs.setup-environment.outputs.VERSION }}
APPLE_IDENTITY: ${{ secrets.APPLE_IDENTITY }}
APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
APPLE_TEAM_ID: ${{ vars.APPLE_TEAM_ID }}
APPLE_ID: ${{ vars.APPLE_ID }}
MACOS_SIGNING_KEY: ${{ secrets.MACOS_SIGNING_KEY }}
MACOS_SIGNING_KEY_PASSWORD: ${{ secrets.MACOS_SIGNING_KEY_PASSWORD }}
CGO_CFLAGS: '-mmacosx-version-min=14.0 -O3'
CGO_CXXFLAGS: '-mmacosx-version-min=14.0 -O3'
CGO_LDFLAGS: '-mmacosx-version-min=14.0 -O3'
steps:
- uses: actions/checkout@v4
- run: |
echo $MACOS_SIGNING_KEY | base64 --decode > certificate.p12
security create-keychain -p password build.keychain
security default-keychain -s build.keychain
security unlock-keychain -p password build.keychain
security import certificate.p12 -k build.keychain -P $MACOS_SIGNING_KEY_PASSWORD -T /usr/bin/codesign
security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k password build.keychain
security set-keychain-settings -lut 3600 build.keychain
- uses: actions/setup-go@v5
with:
go-version-file: go.mod
cache-dependency-path: |
go.sum
Makefile.sync
- run: |
./scripts/build_darwin.sh
- name: Log build results
run: |
ls -l dist/
- uses: actions/upload-artifact@v4
with:
name: bundles-darwin
path: |
dist/*.tgz
dist/*.zip
dist/*.dmg
windows-depends:
strategy:
matrix:
os: [windows]
arch: [amd64]
preset: ['CPU']
include:
- os: windows
arch: amd64
preset: 'CUDA 12'
install: https://developer.download.nvidia.com/compute/cuda/12.8.0/local_installers/cuda_12.8.0_571.96_windows.exe
cuda-components:
- '"cudart"'
- '"nvcc"'
- '"cublas"'
- '"cublas_dev"'
cuda-version: '12.8'
flags: ''
- os: windows
arch: amd64
preset: 'CUDA 13'
install: https://developer.download.nvidia.com/compute/cuda/13.0.0/local_installers/cuda_13.0.0_windows.exe
cuda-components:
- '"cudart"'
- '"nvcc"'
- '"cublas"'
- '"cublas_dev"'
- '"crt"'
- '"nvvm"'
- '"nvptxcompiler"'
cuda-version: '13.0'
flags: ''
- os: windows
arch: amd64
preset: 'ROCm 6'
install: https://download.amd.com/developer/eula/rocm-hub/AMD-Software-PRO-Edition-24.Q4-WinSvr2022-For-HIP.exe
rocm-version: '6.2'
flags: '-DCMAKE_C_COMPILER=clang -DCMAKE_CXX_COMPILER=clang++ -DCMAKE_C_FLAGS="-parallel-jobs=4 -Wno-ignored-attributes -Wno-deprecated-pragma" -DCMAKE_CXX_FLAGS="-parallel-jobs=4 -Wno-ignored-attributes -Wno-deprecated-pragma"'
runner_dir: 'rocm'
- os: windows
arch: amd64
preset: Vulkan
install: https://sdk.lunarg.com/sdk/download/1.4.321.1/windows/vulkansdk-windows-X64-1.4.321.1.exe
flags: ''
runner_dir: 'vulkan'
runs-on: ${{ matrix.arch == 'arm64' && format('{0}-{1}', matrix.os, matrix.arch) || matrix.os }}
environment: release
env:
GOFLAGS: ${{ needs.setup-environment.outputs.GOFLAGS }}
steps:
- name: Install system dependencies
run: |
choco install -y --no-progress ccache ninja
ccache -o cache_dir=${{ github.workspace }}\.ccache
- if: startsWith(matrix.preset, 'CUDA ') || startsWith(matrix.preset, 'ROCm ') || startsWith(matrix.preset, 'Vulkan')
id: cache-install
uses: actions/cache/restore@v4
with:
path: |
C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA
C:\Program Files\AMD\ROCm
C:\VulkanSDK
key: ${{ matrix.install }}
- if: startsWith(matrix.preset, 'CUDA ')
name: Install CUDA ${{ matrix.cuda-version }}
run: |
$ErrorActionPreference = "Stop"
if ("${{ steps.cache-install.outputs.cache-hit }}" -ne 'true') {
Invoke-WebRequest -Uri "${{ matrix.install }}" -OutFile "install.exe"
$subpackages = @(${{ join(matrix.cuda-components, ', ') }}) | Foreach-Object {"${_}_${{ matrix.cuda-version }}"}
Start-Process -FilePath .\install.exe -ArgumentList (@("-s") + $subpackages) -NoNewWindow -Wait
}
$cudaPath = (Resolve-Path "C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\*").path
echo "$cudaPath\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
- if: startsWith(matrix.preset, 'ROCm')
name: Install ROCm ${{ matrix.rocm-version }}
run: |
$ErrorActionPreference = "Stop"
if ("${{ steps.cache-install.outputs.cache-hit }}" -ne 'true') {
Invoke-WebRequest -Uri "${{ matrix.install }}" -OutFile "install.exe"
Start-Process -FilePath .\install.exe -ArgumentList '-install' -NoNewWindow -Wait
}
$hipPath = (Resolve-Path "C:\Program Files\AMD\ROCm\*").path
echo "$hipPath\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
echo "CC=$hipPath\bin\clang.exe" | Out-File -FilePath $env:GITHUB_ENV -Append
echo "CXX=$hipPath\bin\clang++.exe" | Out-File -FilePath $env:GITHUB_ENV -Append
echo "HIPCXX=$hipPath\bin\clang++.exe" | Out-File -FilePath $env:GITHUB_ENV -Append
echo "HIP_PLATFORM=amd" | Out-File -FilePath $env:GITHUB_ENV -Append
echo "CMAKE_PREFIX_PATH=$hipPath" | Out-File -FilePath $env:GITHUB_ENV -Append
- if: matrix.preset == 'Vulkan'
name: Install Vulkan SDK
run: |
$ErrorActionPreference = "Stop"
if ("${{ steps.cache-install.outputs.cache-hit }}" -ne 'true') {
Invoke-WebRequest -Uri "${{ matrix.install }}" -OutFile "install.exe"
Start-Process -FilePath .\install.exe -ArgumentList "-c","--am","--al","in" -NoNewWindow -Wait
}
$vulkanPath = (Resolve-Path "C:\VulkanSDK\*").path
echo "$vulkanPath\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
echo "VULKAN_SDK=$vulkanPath" >> $env:GITHUB_ENV
- if: matrix.preset == 'CPU'
run: |
echo "CC=clang.exe" | Out-File -FilePath $env:GITHUB_ENV -Append
echo "CXX=clang++.exe" | Out-File -FilePath $env:GITHUB_ENV -Append
- if: ${{ !cancelled() && steps.cache-install.outputs.cache-hit != 'true' }}
uses: actions/cache/save@v4
with:
path: |
C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA
C:\Program Files\AMD\ROCm
C:\VulkanSDK
key: ${{ matrix.install }}
- uses: actions/checkout@v4
- uses: actions/cache@v4
with:
path: ${{ github.workspace }}\.ccache
key: ccache-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.preset }}-${{ needs.setup-environment.outputs.vendorsha }}
- name: Build target "${{ matrix.preset }}"
run: |
Import-Module 'C:\Program Files\Microsoft Visual Studio\2022\Enterprise\Common7\Tools\Microsoft.VisualStudio.DevShell.dll'
Enter-VsDevShell -VsInstallPath 'C:\Program Files\Microsoft Visual Studio\2022\Enterprise' -SkipAutomaticLocation -DevCmdArguments '-arch=x64 -no_logo'
cmake --preset "${{ matrix.preset }}" ${{ matrix.flags }} --install-prefix "$((pwd).Path)\dist\${{ matrix.os }}-${{ matrix.arch }}"
cmake --build --parallel ([Environment]::ProcessorCount) --preset "${{ matrix.preset }}"
cmake --install build --component "${{ startsWith(matrix.preset, 'CUDA ') && 'CUDA' || startsWith(matrix.preset, 'ROCm ') && 'HIP' || startsWith(matrix.preset, 'Vulkan') && 'Vulkan' || 'CPU' }}" --strip
Remove-Item -Path dist\lib\ollama\rocm\rocblas\library\*gfx906* -ErrorAction SilentlyContinue
env:
CMAKE_GENERATOR: Ninja
- name: Log build results
run: |
gci -path .\dist -Recurse -File | ForEach-Object { get-filehash -path $_.FullName -Algorithm SHA256 } | format-list
- uses: actions/upload-artifact@v4
with:
name: depends-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.preset }}
path: dist\*
windows-build:
strategy:
matrix:
os: [windows]
arch: [amd64, arm64]
include:
- os: windows
arch: amd64
llvmarch: x86_64
- os: windows
arch: arm64
llvmarch: aarch64
runs-on: ${{ matrix.arch == 'arm64' && format('{0}-{1}', matrix.os, matrix.arch) || matrix.os }}
environment: release
needs: [setup-environment]
env:
GOFLAGS: ${{ needs.setup-environment.outputs.GOFLAGS }}
VERSION: ${{ needs.setup-environment.outputs.VERSION }}
steps:
- name: Install ARM64 system dependencies
if: matrix.arch == 'arm64'
run: |
$ErrorActionPreference = "Stop"
Set-ExecutionPolicy Bypass -Scope Process -Force
[System.Net.ServicePointManager]::SecurityProtocol = [System.Net.ServicePointManager]::SecurityProtocol -bor 3072
iex ((New-Object System.Net.WebClient).DownloadString('https://community.chocolatey.org/install.ps1'))
echo "C:\ProgramData\chocolatey\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
Invoke-WebRequest -Uri https://aka.ms/vs/17/release/vc_redist.arm64.exe -OutFile "${{ runner.temp }}\vc_redist.arm64.exe"
Start-Process -FilePath "${{ runner.temp }}\vc_redist.arm64.exe" -ArgumentList @("/install", "/quiet", "/norestart") -NoNewWindow -Wait
choco install -y --no-progress git gzip
echo "C:\Program Files\Git\cmd" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
- name: Install clang and gcc-compat
run: |
$ErrorActionPreference = "Stop"
Set-ExecutionPolicy Bypass -Scope Process -Force
Invoke-WebRequest -Uri "https://github.com/mstorsjo/llvm-mingw/releases/download/20240619/llvm-mingw-20240619-ucrt-${{ matrix.llvmarch }}.zip" -OutFile "${{ runner.temp }}\llvm-mingw-ucrt.zip"
Expand-Archive -Path ${{ runner.temp }}\llvm-mingw-ucrt.zip -DestinationPath "C:\Program Files\"
$installPath=(Resolve-Path -Path "C:\Program Files\llvm-mingw-*-ucrt*").path
echo "$installPath\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
- uses: actions/checkout@v4
- uses: actions/setup-go@v5
with:
go-version-file: go.mod
cache-dependency-path: |
go.sum
Makefile.sync
- name: Verify gcc is actually clang
run: |
$ErrorActionPreference='Continue'
$version=& gcc -v 2>&1
$version=$version -join "`n"
echo "gcc is $version"
if ($version -notmatch 'clang') {
echo "ERROR: GCC must be clang for proper utf16 handling"
exit 1
}
$ErrorActionPreference='Stop'
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: "20"
- run: |
./scripts/build_windows.ps1 ollama app
- name: Log build results
run: |
gci -path .\dist -Recurse -File | ForEach-Object { get-filehash -path $_.FullName -Algorithm SHA256 } | format-list
- uses: actions/upload-artifact@v4
with:
name: build-${{ matrix.os }}-${{ matrix.arch }}
path: |
dist\*
windows-app:
runs-on: windows
environment: release
needs: [windows-build, windows-depends]
env:
GOFLAGS: ${{ needs.setup-environment.outputs.GOFLAGS }}
VERSION: ${{ needs.setup-environment.outputs.VERSION }}
KEY_CONTAINER: ${{ vars.KEY_CONTAINER }}
steps:
- uses: actions/checkout@v4
- uses: google-github-actions/auth@v2
with:
project_id: ollama
credentials_json: ${{ secrets.GOOGLE_SIGNING_CREDENTIALS }}
- run: |
$ErrorActionPreference = "Stop"
Invoke-WebRequest -Uri "https://go.microsoft.com/fwlink/p/?LinkId=323507" -OutFile "${{ runner.temp }}\sdksetup.exe"
Start-Process "${{ runner.temp }}\sdksetup.exe" -ArgumentList @("/q") -NoNewWindow -Wait
Invoke-WebRequest -Uri "https://github.com/GoogleCloudPlatform/kms-integrations/releases/download/cng-v1.0/kmscng-1.0-windows-amd64.zip" -OutFile "${{ runner.temp }}\plugin.zip"
Expand-Archive -Path "${{ runner.temp }}\plugin.zip" -DestinationPath "${{ runner.temp }}\plugin\"
& "${{ runner.temp }}\plugin\*\kmscng.msi" /quiet
echo "${{ vars.OLLAMA_CERT }}" >ollama_inc.crt
- uses: actions/setup-go@v5
with:
go-version-file: go.mod
cache-dependency-path: |
go.sum
Makefile.sync
- uses: actions/download-artifact@v4
with:
pattern: depends-windows*
path: dist
merge-multiple: true
- uses: actions/download-artifact@v4
with:
pattern: build-windows*
path: dist
merge-multiple: true
- name: Log dist contents after download
run: |
gci -path .\dist -recurse
- run: |
./scripts/build_windows.ps1 deps sign installer zip
- name: Log contents after build
run: |
gci -path .\dist -Recurse -File | ForEach-Object { get-filehash -path $_.FullName -Algorithm SHA256 } | format-list
- uses: actions/upload-artifact@v4
with:
name: bundles-windows
path: |
dist/*.zip
dist/OllamaSetup.exe
linux-build:
strategy:
matrix:
include:
- os: linux
arch: amd64
target: archive
- os: linux
arch: amd64
target: rocm
- os: linux
arch: arm64
target: archive
runs-on: ${{ matrix.arch == 'arm64' && format('{0}-{1}', matrix.os, matrix.arch) || matrix.os }}
environment: release
needs: setup-environment
env:
GOFLAGS: ${{ needs.setup-environment.outputs.GOFLAGS }}
steps:
- uses: actions/checkout@v4
- uses: docker/setup-buildx-action@v3
- uses: docker/build-push-action@v6
with:
context: .
platforms: ${{ matrix.os }}/${{ matrix.arch }}
target: ${{ matrix.target }}
build-args: |
GOFLAGS=${{ env.GOFLAGS }}
CGO_CFLAGS=${{ env.CGO_CFLAGS }}
CGO_CXXFLAGS=${{ env.CGO_CXXFLAGS }}
outputs: type=local,dest=dist/${{ matrix.os }}-${{ matrix.arch }}
cache-from: type=registry,ref=${{ vars.DOCKER_REPO }}:latest
cache-to: type=inline
- run: |
for COMPONENT in bin/* lib/ollama/*; do
case "$COMPONENT" in
bin/ollama) echo $COMPONENT >>ollama-${{ matrix.os }}-${{ matrix.arch }}.tar.in ;;
lib/ollama/*.so*) echo $COMPONENT >>ollama-${{ matrix.os }}-${{ matrix.arch }}.tar.in ;;
lib/ollama/cuda_v*) echo $COMPONENT >>ollama-${{ matrix.os }}-${{ matrix.arch }}.tar.in ;;
lib/ollama/vulkan*) echo $COMPONENT >>ollama-${{ matrix.os }}-${{ matrix.arch }}.tar.in ;;
lib/ollama/cuda_jetpack5) echo $COMPONENT >>ollama-${{ matrix.os }}-${{ matrix.arch }}-jetpack5.tar.in ;;
lib/ollama/cuda_jetpack6) echo $COMPONENT >>ollama-${{ matrix.os }}-${{ matrix.arch }}-jetpack6.tar.in ;;
lib/ollama/rocm) echo $COMPONENT >>ollama-${{ matrix.os }}-${{ matrix.arch }}-rocm.tar.in ;;
esac
done
working-directory: dist/${{ matrix.os }}-${{ matrix.arch }}
- run: |
echo "Manifests"
for ARCHIVE in dist/${{ matrix.os }}-${{ matrix.arch }}/*.tar.in ; do
echo $ARCHIVE
cat $ARCHIVE
done
- run: |
for ARCHIVE in dist/${{ matrix.os }}-${{ matrix.arch }}/*.tar.in; do
tar c -C dist/${{ matrix.os }}-${{ matrix.arch }} -T $ARCHIVE --owner 0 --group 0 | pigz -9vc >$(basename ${ARCHIVE//.*/}.tgz);
done
- uses: actions/upload-artifact@v4
with:
name: bundles-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.target }}
path: |
*.tgz
# Build each Docker variant (OS, arch, and flavor) separately. Using QEMU is unreliable and slower.
docker-build-push:
strategy:
matrix:
include:
- os: linux
arch: arm64
build-args: |
CGO_CFLAGS
CGO_CXXFLAGS
GOFLAGS
- os: linux
arch: amd64
build-args: |
CGO_CFLAGS
CGO_CXXFLAGS
GOFLAGS
- os: linux
arch: amd64
suffix: '-rocm'
build-args: |
CGO_CFLAGS
CGO_CXXFLAGS
GOFLAGS
FLAVOR=rocm
runs-on: ${{ matrix.arch == 'arm64' && format('{0}-{1}', matrix.os, matrix.arch) || matrix.os }}
environment: release
needs: setup-environment
env:
GOFLAGS: ${{ needs.setup-environment.outputs.GOFLAGS }}
steps:
- uses: actions/checkout@v4
- uses: docker/setup-buildx-action@v3
- uses: docker/login-action@v3
with:
username: ${{ vars.DOCKER_USER }}
password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
- id: build-push
uses: docker/build-push-action@v6
with:
context: .
platforms: ${{ matrix.os }}/${{ matrix.arch }}
build-args: ${{ matrix.build-args }}
outputs: type=image,name=${{ vars.DOCKER_REPO }},push-by-digest=true,name-canonical=true,push=true
cache-from: type=registry,ref=${{ vars.DOCKER_REPO }}:latest
cache-to: type=inline
- run: |
mkdir -p ${{ matrix.os }}-${{ matrix.arch }}
echo "${{ steps.build-push.outputs.digest }}" >${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.suffix }}.txt
working-directory: ${{ runner.temp }}
- uses: actions/upload-artifact@v4
with:
name: digest-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.suffix }}
path: |
${{ runner.temp }}/${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.suffix }}.txt
# Merge Docker images for the same flavor into a single multi-arch manifest
docker-merge-push:
strategy:
matrix:
suffix: ['', '-rocm']
runs-on: linux
environment: release
needs: [docker-build-push]
steps:
- uses: docker/login-action@v3
with:
username: ${{ vars.DOCKER_USER }}
password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
- id: metadata
uses: docker/metadata-action@v4
with:
flavor: |
latest=false
suffix=${{ matrix.suffix }}
images: |
${{ vars.DOCKER_REPO }}
tags: |
type=ref,enable=true,priority=600,prefix=pr-,event=pr
type=semver,pattern={{version}}
- uses: actions/download-artifact@v4
with:
pattern: digest-*
path: ${{ runner.temp }}
merge-multiple: true
- run: |
docker buildx imagetools create $(echo '${{ steps.metadata.outputs.json }}' | jq -cr '.tags | map("-t", .) | join(" ")') $(cat *-${{ matrix.suffix }}.txt | xargs printf '${{ vars.DOCKER_REPO }}@%s ')
docker buildx imagetools inspect ${{ vars.DOCKER_REPO }}:${{ steps.metadata.outputs.version }}
working-directory: ${{ runner.temp }}
# Final release process
release:
runs-on: ubuntu-latest
environment: release
needs: [darwin-build, windows-app, linux-build]
permissions:
contents: write
env:
GH_TOKEN: ${{ github.token }}
steps:
- uses: actions/checkout@v4
- uses: actions/download-artifact@v4
with:
pattern: bundles-*
path: dist
merge-multiple: true
- name: Log dist contents
run: |
ls -l dist/
- name: Generate checksum file
run: find . -type f -not -name 'sha256sum.txt' | xargs sha256sum | tee sha256sum.txt
working-directory: dist
- name: Create or update Release for tag
run: |
RELEASE_VERSION="$(echo ${GITHUB_REF_NAME} | cut -f1 -d-)"
echo "Looking for existing release for ${RELEASE_VERSION}"
OLD_TAG=$(gh release ls --json name,tagName | jq -r ".[] | select(.name == \"${RELEASE_VERSION}\") | .tagName")
if [ -n "$OLD_TAG" ]; then
echo "Updating release ${RELEASE_VERSION} to point to new tag ${GITHUB_REF_NAME}"
gh release edit ${OLD_TAG} --tag ${GITHUB_REF_NAME}
else
echo "Creating new release ${RELEASE_VERSION} pointing to tag ${GITHUB_REF_NAME}"
gh release create ${GITHUB_REF_NAME} \
--title ${RELEASE_VERSION} \
--draft \
--generate-notes \
--prerelease
fi
- name: Upload release artifacts
run: |
pids=()
for payload in dist/*.txt dist/*.zip dist/*.tgz dist/*.exe dist/*.dmg ; do
echo "Uploading $payload"
gh release upload ${GITHUB_REF_NAME} $payload --clobber &
pids[$!]=$!
sleep 1
done
echo "Waiting for uploads to complete"
for pid in "${pids[@]}"; do
wait $pid
done
echo "done"


@@ -1,245 +0,0 @@
name: test
concurrency:
# For PRs, later CI runs preempt previous ones. e.g. a force push on a PR
# cancels running CI jobs and starts all new ones.
#
# For non-PR pushes, concurrency.group needs to be unique for every distinct
# CI run we want to have happen. Use run_id, which in practice means all
# non-PR CI runs will be allowed to run without preempting each other.
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
cancel-in-progress: true
on:
pull_request:
paths:
- '**/*'
- '!docs/**'
- '!README.md'
jobs:
changes:
runs-on: ubuntu-latest
outputs:
changed: ${{ steps.changes.outputs.changed }}
vendorsha: ${{ steps.changes.outputs.vendorsha }}
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- id: changes
run: |
changed() {
local BASE=${{ github.event.pull_request.base.sha }}
local HEAD=${{ github.event.pull_request.head.sha }}
local MERGE_BASE=$(git merge-base $BASE $HEAD)
git diff-tree -r --no-commit-id --name-only "$MERGE_BASE" "$HEAD" \
| xargs python3 -c "import sys; from pathlib import Path; print(any(Path(x).match(glob) for x in sys.argv[1:] for glob in '$*'.split(' ')))"
}
echo changed=$(changed 'llama/llama.cpp/**/*' 'ml/backend/ggml/ggml/**/*') | tee -a $GITHUB_OUTPUT
echo vendorsha=$(make -f Makefile.sync print-base) | tee -a $GITHUB_OUTPUT
linux:
needs: [changes]
if: needs.changes.outputs.changed == 'True'
strategy:
matrix:
include:
- preset: CPU
- preset: CUDA
container: nvidia/cuda:13.0.0-devel-ubuntu22.04
flags: '-DCMAKE_CUDA_ARCHITECTURES=87'
- preset: ROCm
container: rocm/dev-ubuntu-22.04:6.1.2
extra-packages: rocm-libs
flags: '-DAMDGPU_TARGETS=gfx1010 -DCMAKE_PREFIX_PATH=/opt/rocm'
- preset: Vulkan
container: ubuntu:22.04
extra-packages: >
mesa-vulkan-drivers vulkan-tools
libvulkan1 libvulkan-dev
vulkan-sdk cmake ccache g++ make
runs-on: linux
container: ${{ matrix.container }}
steps:
- uses: actions/checkout@v4
- run: |
[ -n "${{ matrix.container }}" ] || sudo=sudo
$sudo apt-get update
# Add LunarG Vulkan SDK apt repo for Ubuntu 22.04
if [ "${{ matrix.preset }}" = "Vulkan" ]; then
$sudo apt-get install -y --no-install-recommends wget gnupg ca-certificates software-properties-common
wget -qO - https://packages.lunarg.com/lunarg-signing-key-pub.asc | $sudo gpg --dearmor -o /usr/share/keyrings/lunarg-archive-keyring.gpg
# Use signed-by to bind the repo to the installed keyring to avoid NO_PUBKEY
echo "deb [signed-by=/usr/share/keyrings/lunarg-archive-keyring.gpg] https://packages.lunarg.com/vulkan/1.4.313 jammy main" | $sudo tee /etc/apt/sources.list.d/lunarg-vulkan-1.4.313-jammy.list > /dev/null
$sudo apt-get update
fi
$sudo apt-get install -y cmake ccache ${{ matrix.extra-packages }}
# Export VULKAN_SDK if provided by LunarG package (defensive)
if [ -d "/usr/lib/x86_64-linux-gnu/vulkan" ] && [ "${{ matrix.preset }}" = "Vulkan" ]; then
echo "VULKAN_SDK=/usr" >> $GITHUB_ENV
fi
env:
DEBIAN_FRONTEND: noninteractive
- uses: actions/cache@v4
with:
path: /github/home/.cache/ccache
key: ccache-${{ runner.os }}-${{ runner.arch }}-${{ matrix.preset }}-${{ needs.changes.outputs.vendorsha }}
- run: |
cmake --preset ${{ matrix.preset }} ${{ matrix.flags }}
cmake --build --preset ${{ matrix.preset }} --parallel
windows:
needs: [changes]
if: needs.changes.outputs.changed == 'True'
strategy:
matrix:
include:
- preset: CPU
- preset: CUDA
install: https://developer.download.nvidia.com/compute/cuda/13.0.0/local_installers/cuda_13.0.0_windows.exe
flags: '-DCMAKE_CUDA_ARCHITECTURES=80'
cuda-components:
- '"cudart"'
- '"nvcc"'
- '"cublas"'
- '"cublas_dev"'
- '"crt"'
- '"nvvm"'
- '"nvptxcompiler"'
cuda-version: '13.0'
- preset: ROCm
install: https://download.amd.com/developer/eula/rocm-hub/AMD-Software-PRO-Edition-24.Q4-WinSvr2022-For-HIP.exe
flags: '-DAMDGPU_TARGETS=gfx1010 -DCMAKE_C_COMPILER=clang -DCMAKE_CXX_COMPILER=clang++ -DCMAKE_C_FLAGS="-parallel-jobs=4 -Wno-ignored-attributes -Wno-deprecated-pragma" -DCMAKE_CXX_FLAGS="-parallel-jobs=4 -Wno-ignored-attributes -Wno-deprecated-pragma"'
- preset: Vulkan
install: https://sdk.lunarg.com/sdk/download/1.4.321.1/windows/vulkansdk-windows-X64-1.4.321.1.exe
runs-on: windows
steps:
- run: |
choco install -y --no-progress ccache ninja
ccache -o cache_dir=${{ github.workspace }}\.ccache
- if: matrix.preset == 'CUDA' || matrix.preset == 'ROCm' || matrix.preset == 'Vulkan'
id: cache-install
uses: actions/cache/restore@v4
with:
path: |
C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA
C:\Program Files\AMD\ROCm
C:\VulkanSDK
key: ${{ matrix.install }}
- if: matrix.preset == 'CUDA'
name: Install CUDA ${{ matrix.cuda-version }}
run: |
$ErrorActionPreference = "Stop"
if ("${{ steps.cache-install.outputs.cache-hit }}" -ne 'true') {
Invoke-WebRequest -Uri "${{ matrix.install }}" -OutFile "install.exe"
$subpackages = @(${{ join(matrix.cuda-components, ', ') }}) | Foreach-Object {"${_}_${{ matrix.cuda-version }}"}
Start-Process -FilePath .\install.exe -ArgumentList (@("-s") + $subpackages) -NoNewWindow -Wait
}
$cudaPath = (Resolve-Path "C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\*").path
echo "$cudaPath\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
- if: matrix.preset == 'ROCm'
name: Install ROCm ${{ matrix.rocm-version }}
run: |
$ErrorActionPreference = "Stop"
if ("${{ steps.cache-install.outputs.cache-hit }}" -ne 'true') {
Invoke-WebRequest -Uri "${{ matrix.install }}" -OutFile "install.exe"
Start-Process -FilePath .\install.exe -ArgumentList '-install' -NoNewWindow -Wait
}
$hipPath = (Resolve-Path "C:\Program Files\AMD\ROCm\*").path
echo "$hipPath\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
echo "CC=$hipPath\bin\clang.exe" | Out-File -FilePath $env:GITHUB_ENV -Append
echo "CXX=$hipPath\bin\clang++.exe" | Out-File -FilePath $env:GITHUB_ENV -Append
echo "HIPCXX=$hipPath\bin\clang++.exe" | Out-File -FilePath $env:GITHUB_ENV -Append
echo "HIP_PLATFORM=amd" | Out-File -FilePath $env:GITHUB_ENV -Append
echo "CMAKE_PREFIX_PATH=$hipPath" | Out-File -FilePath $env:GITHUB_ENV -Append
- if: matrix.preset == 'Vulkan'
name: Install Vulkan SDK
run: |
$ErrorActionPreference = "Stop"
if ("${{ steps.cache-install.outputs.cache-hit }}" -ne 'true') {
Invoke-WebRequest -Uri "${{ matrix.install }}" -OutFile "install.exe"
Start-Process -FilePath .\install.exe -ArgumentList "-c","--am","--al","in" -NoNewWindow -Wait
}
$vulkanPath = (Resolve-Path "C:\VulkanSDK\*").path
echo "$vulkanPath\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
echo "VULKAN_SDK=$vulkanPath" >> $env:GITHUB_ENV
- if: ${{ !cancelled() && steps.cache-install.outputs.cache-hit != 'true' }}
uses: actions/cache/save@v4
with:
path: |
C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA
C:\Program Files\AMD\ROCm
C:\VulkanSDK
key: ${{ matrix.install }}
- uses: actions/checkout@v4
- uses: actions/cache@v4
with:
path: ${{ github.workspace }}\.ccache
key: ccache-${{ runner.os }}-${{ runner.arch }}-${{ matrix.preset }}-${{ needs.changes.outputs.vendorsha }}
- run: |
Import-Module 'C:\Program Files\Microsoft Visual Studio\2022\Enterprise\Common7\Tools\Microsoft.VisualStudio.DevShell.dll'
Enter-VsDevShell -VsInstallPath 'C:\Program Files\Microsoft Visual Studio\2022\Enterprise' -SkipAutomaticLocation -DevCmdArguments '-arch=x64 -no_logo'
cmake --preset "${{ matrix.preset }}" ${{ matrix.flags }}
cmake --build --parallel --preset "${{ matrix.preset }}"
env:
CMAKE_GENERATOR: Ninja
go_mod_tidy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: check that 'go mod tidy' is clean
run: go mod tidy --diff || (echo "Please run 'go mod tidy'." && exit 1)
test:
strategy:
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
runs-on: ${{ matrix.os }}
env:
CGO_ENABLED: '1'
steps:
- uses: actions/checkout@v4
- uses: actions/setup-go@v5
with:
go-version-file: 'go.mod'
cache-dependency-path: |
go.sum
Makefile.sync
- uses: actions/setup-node@v4
with:
node-version: '20'
- name: Install UI dependencies
working-directory: ./app/ui/app
run: npm ci
- name: Install tscriptify
run: |
go install github.com/tkrajina/typescriptify-golang-structs/tscriptify@latest
- name: Run UI tests
if: ${{ startsWith(matrix.os, 'ubuntu') }}
working-directory: ./app/ui/app
run: npm test
- name: Run go generate
run: go generate ./...
- name: go test
if: always()
run: go test -count=1 -benchtime=1x ./...
- uses: golangci/golangci-lint-action@v9
with:
only-new-issues: true
patches:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Verify patches apply cleanly and do not change files
run: |
make -f Makefile.sync clean checkout apply-patches sync
git diff --compact-summary --exit-code

.gitignore (vendored)

@@ -2,16 +2,5 @@
.vscode
.env
.venv
.swp
dist
build
.cache
.gocache
*.exe
.idea
test_data
*.crt
__debug_bin*
llama/build
llama/vendor
/ollama
ollama


@@ -1,51 +0,0 @@
version: "2"
linters:
enable:
- asasalint
- bidichk
- bodyclose
- containedctx
- gocheckcompilerdirectives
- intrange
- makezero
- misspell
- nilerr
- nolintlint
- nosprintfhostport
- unconvert
- usetesting
- wastedassign
- whitespace
disable:
- errcheck
- usestdlibvars
settings:
govet:
disable:
- unusedresult
staticcheck:
checks:
- all
- -QF* # disable quick fix suggestions
- -SA1019
- -ST1000 # package comment format
- -ST1003 # underscores in package names
- -ST1005 # error strings should not be capitalized
- -ST1012 # error var naming (ErrFoo)
- -ST1016 # receiver name consistency
- -ST1020 # comment on exported function format
- -ST1021 # comment on exported type format
- -ST1022 # comment on exported var format
- -ST1023 # omit type from declaration
severity:
default: error
rules:
- linters:
- gofmt
- goimports
- intrange
severity: info
formatters:
enable:
- gofmt
- gofumpt

.prettierrc.json (new file)

@@ -0,0 +1,10 @@
{
"trailingComma": "es5",
"tabWidth": 2,
"useTabs": false,
"semi": false,
"singleQuote": true,
"jsxSingleQuote": true,
"printWidth": 120,
"arrowParens": "avoid"
}


@@ -1,153 +0,0 @@
cmake_minimum_required(VERSION 3.21)
project(Ollama C CXX)
include(CheckLanguage)
include(GNUInstallDirs)
find_package(Threads REQUIRED)
set(CMAKE_BUILD_TYPE Release)
set(BUILD_SHARED_LIBS ON)
set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_CXX_EXTENSIONS OFF)
set(GGML_BUILD ON)
set(GGML_SHARED ON)
set(GGML_CCACHE ON)
set(GGML_BACKEND_DL ON)
set(GGML_BACKEND_SHARED ON)
set(GGML_SCHED_MAX_COPIES 4)
set(GGML_LLAMAFILE ON)
set(GGML_CUDA_PEER_MAX_BATCH_SIZE 128)
set(GGML_CUDA_GRAPHS ON)
set(GGML_CUDA_FA ON)
set(GGML_CUDA_COMPRESSION_MODE default)
if((CMAKE_OSX_ARCHITECTURES AND NOT CMAKE_OSX_ARCHITECTURES MATCHES "arm64")
OR (NOT CMAKE_OSX_ARCHITECTURES AND NOT CMAKE_SYSTEM_PROCESSOR MATCHES "arm|aarch64|ARM64|ARMv[0-9]+"))
set(GGML_CPU_ALL_VARIANTS ON)
endif()
if (CMAKE_OSX_ARCHITECTURES MATCHES "x86_64")
set(CMAKE_BUILD_RPATH "@loader_path")
set(CMAKE_INSTALL_RPATH "@loader_path")
endif()
set(OLLAMA_BUILD_DIR ${CMAKE_BINARY_DIR}/lib/ollama)
set(OLLAMA_INSTALL_DIR ${CMAKE_INSTALL_PREFIX}/lib/ollama/${OLLAMA_RUNNER_DIR})
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${OLLAMA_BUILD_DIR})
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY_DEBUG ${OLLAMA_BUILD_DIR})
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY_RELEASE ${OLLAMA_BUILD_DIR})
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${OLLAMA_BUILD_DIR})
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY_DEBUG ${OLLAMA_BUILD_DIR})
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY_RELEASE ${OLLAMA_BUILD_DIR})
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/ml/backend/ggml/ggml/src)
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/ml/backend/ggml/ggml/src/include)
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/ml/backend/ggml/ggml/src/ggml-cpu)
include_directories(${CMAKE_CURRENT_SOURCE_DIR}/ml/backend/ggml/ggml/src/ggml-cpu/amx)
add_compile_definitions(NDEBUG GGML_VERSION=0x0 GGML_COMMIT=0x0)
set(GGML_CPU ON)
add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/ml/backend/ggml/ggml/src)
set_property(TARGET ggml PROPERTY EXCLUDE_FROM_ALL TRUE)
get_target_property(CPU_VARIANTS ggml-cpu MANUALLY_ADDED_DEPENDENCIES)
if(NOT CPU_VARIANTS)
set(CPU_VARIANTS "ggml-cpu")
endif()
install(TARGETS ggml-base ${CPU_VARIANTS}
RUNTIME_DEPENDENCIES
PRE_EXCLUDE_REGEXES ".*"
RUNTIME DESTINATION ${OLLAMA_INSTALL_DIR} COMPONENT CPU
LIBRARY DESTINATION ${OLLAMA_INSTALL_DIR} COMPONENT CPU
FRAMEWORK DESTINATION ${OLLAMA_INSTALL_DIR} COMPONENT CPU
)
check_language(CUDA)
if(CMAKE_CUDA_COMPILER)
if(CMAKE_VERSION VERSION_GREATER_EQUAL "3.24" AND NOT CMAKE_CUDA_ARCHITECTURES)
set(CMAKE_CUDA_ARCHITECTURES "native")
endif()
find_package(CUDAToolkit)
add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/ml/backend/ggml/ggml/src/ggml-cuda)
install(TARGETS ggml-cuda
RUNTIME_DEPENDENCIES
DIRECTORIES ${CUDAToolkit_BIN_DIR} ${CUDAToolkit_BIN_DIR}/x64 ${CUDAToolkit_LIBRARY_DIR}
PRE_INCLUDE_REGEXES cublas cublasLt cudart
PRE_EXCLUDE_REGEXES ".*"
RUNTIME DESTINATION ${OLLAMA_INSTALL_DIR} COMPONENT CUDA
LIBRARY DESTINATION ${OLLAMA_INSTALL_DIR} COMPONENT CUDA
)
endif()
set(WINDOWS_AMDGPU_TARGETS_EXCLUDE_REGEX "^gfx(908|90a|1200|1201):xnack[+-]$"
CACHE STRING
"Regular expression describing AMDGPU_TARGETS not supported on Windows. Override to force building these targets. Default \"^gfx(908|90a|1200|1201):xnack[+-]$\"."
)
check_language(HIP)
if(CMAKE_HIP_COMPILER)
set(HIP_PLATFORM "amd")
if(NOT AMDGPU_TARGETS)
find_package(hip REQUIRED)
list(FILTER AMDGPU_TARGETS INCLUDE REGEX "^gfx(94[012]|101[02]|1030|110[012]|120[01])$")
endif()
if(WIN32 AND WINDOWS_AMDGPU_TARGETS_EXCLUDE_REGEX)
list(FILTER AMDGPU_TARGETS EXCLUDE REGEX ${WINDOWS_AMDGPU_TARGETS_EXCLUDE_REGEX})
endif()
if(AMDGPU_TARGETS)
find_package(hip REQUIRED)
add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/ml/backend/ggml/ggml/src/ggml-hip)
if (WIN32)
target_compile_definitions(ggml-hip PRIVATE GGML_CUDA_NO_PEER_COPY)
endif()
target_compile_definitions(ggml-hip PRIVATE GGML_HIP_NO_VMM)
install(TARGETS ggml-hip
RUNTIME_DEPENDENCY_SET rocm
RUNTIME DESTINATION ${OLLAMA_INSTALL_DIR} COMPONENT HIP
LIBRARY DESTINATION ${OLLAMA_INSTALL_DIR} COMPONENT HIP
)
install(RUNTIME_DEPENDENCY_SET rocm
DIRECTORIES ${HIP_BIN_INSTALL_DIR} ${HIP_LIB_INSTALL_DIR}
PRE_INCLUDE_REGEXES hipblas rocblas amdhip64 rocsolver amd_comgr hsa-runtime64 rocsparse tinfo rocprofiler-register drm drm_amdgpu numa elf
PRE_EXCLUDE_REGEXES ".*"
POST_EXCLUDE_REGEXES "system32"
RUNTIME DESTINATION ${OLLAMA_INSTALL_DIR} COMPONENT HIP
LIBRARY DESTINATION ${OLLAMA_INSTALL_DIR} COMPONENT HIP
)
foreach(HIP_LIB_BIN_INSTALL_DIR IN ITEMS ${HIP_BIN_INSTALL_DIR} ${HIP_LIB_INSTALL_DIR})
if(EXISTS ${HIP_LIB_BIN_INSTALL_DIR}/rocblas)
install(DIRECTORY ${HIP_LIB_BIN_INSTALL_DIR}/rocblas DESTINATION ${OLLAMA_INSTALL_DIR} COMPONENT HIP)
break()
endif()
endforeach()
endif()
endif()
find_package(Vulkan)
if(Vulkan_FOUND)
add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/ml/backend/ggml/ggml/src/ggml-vulkan)
install(TARGETS ggml-vulkan
RUNTIME_DEPENDENCIES
PRE_INCLUDE_REGEXES vulkan
PRE_EXCLUDE_REGEXES ".*"
RUNTIME DESTINATION ${OLLAMA_INSTALL_DIR} COMPONENT Vulkan
LIBRARY DESTINATION ${OLLAMA_INSTALL_DIR} COMPONENT Vulkan
)
endif()


@@ -1,145 +0,0 @@
{
"version": 3,
"configurePresets": [
{
"name": "Default",
"binaryDir": "${sourceDir}/build",
"installDir": "${sourceDir}/dist",
"cacheVariables": {
"CMAKE_BUILD_TYPE": "Release",
"CMAKE_MSVC_RUNTIME_LIBRARY": "MultiThreaded"
}
},
{
"name": "CPU",
"inherits": [ "Default" ]
},
{
"name": "CUDA",
"inherits": [ "Default" ]
},
{
"name": "CUDA 11",
"inherits": [ "CUDA" ],
"cacheVariables": {
"CMAKE_CUDA_ARCHITECTURES": "50-virtual;60-virtual;61-virtual;70-virtual;75-virtual;80-virtual;86-virtual;87-virtual;89-virtual;90-virtual",
"CMAKE_CUDA_FLAGS": "-Wno-deprecated-gpu-targets -t 2",
"OLLAMA_RUNNER_DIR": "cuda_v11"
}
},
{
"name": "CUDA 12",
"inherits": [ "CUDA" ],
"cacheVariables": {
"CMAKE_CUDA_ARCHITECTURES": "50;52;60;61;70;75;80;86;89;90;90a;120",
"CMAKE_CUDA_FLAGS": "-Wno-deprecated-gpu-targets -t 2",
"OLLAMA_RUNNER_DIR": "cuda_v12"
}
},
{
"name": "CUDA 13",
"inherits": [ "CUDA" ],
"cacheVariables": {
"CMAKE_CUDA_ARCHITECTURES": "75-virtual;80-virtual;86-virtual;87-virtual;89-virtual;90-virtual;90a-virtual;100-virtual;103-virtual;110-virtual;120-virtual;121-virtual",
"CMAKE_CUDA_FLAGS": "-t 2",
"OLLAMA_RUNNER_DIR": "cuda_v13"
}
},
{
"name": "JetPack 5",
"inherits": [ "CUDA" ],
"cacheVariables": {
"CMAKE_CUDA_ARCHITECTURES": "72;87",
"OLLAMA_RUNNER_DIR": "cuda_jetpack5"
}
},
{
"name": "JetPack 6",
"inherits": [ "CUDA" ],
"cacheVariables": {
"CMAKE_CUDA_ARCHITECTURES": "87",
"OLLAMA_RUNNER_DIR": "cuda_jetpack6"
}
},
{
"name": "ROCm",
"inherits": [ "Default" ],
"cacheVariables": {
"CMAKE_HIP_PLATFORM": "amd"
}
},
{
"name": "ROCm 6",
"inherits": [ "ROCm" ],
"cacheVariables": {
"CMAKE_HIP_FLAGS": "-parallel-jobs=4",
"AMDGPU_TARGETS": "gfx940;gfx941;gfx942;gfx1010;gfx1012;gfx1030;gfx1100;gfx1101;gfx1102;gfx1151;gfx1200;gfx1201;gfx908:xnack-;gfx90a:xnack+;gfx90a:xnack-",
"OLLAMA_RUNNER_DIR": "rocm"
}
},
{
"name": "Vulkan",
"inherits": [ "Default" ],
"cacheVariables": {
"OLLAMA_RUNNER_DIR": "vulkan"
}
}
],
"buildPresets": [
{
"name": "Default",
"configurePreset": "Default",
"configuration": "Release"
},
{
"name": "CPU",
"configurePreset": "Default",
"targets": [ "ggml-cpu" ]
},
{
"name": "CUDA",
"configurePreset": "CUDA",
"targets": [ "ggml-cuda" ]
},
{
"name": "CUDA 11",
"inherits": [ "CUDA" ],
"configurePreset": "CUDA 11"
},
{
"name": "CUDA 12",
"inherits": [ "CUDA" ],
"configurePreset": "CUDA 12"
},
{
"name": "CUDA 13",
"inherits": [ "CUDA" ],
"configurePreset": "CUDA 13"
},
{
"name": "JetPack 5",
"inherits": [ "CUDA" ],
"configurePreset": "JetPack 5"
},
{
"name": "JetPack 6",
"inherits": [ "CUDA" ],
"configurePreset": "JetPack 6"
},
{
"name": "ROCm",
"configurePreset": "ROCm",
"targets": [ "ggml-hip" ]
},
{
"name": "ROCm 6",
"inherits": [ "ROCm" ],
"configurePreset": "ROCm 6"
},
{
"name": "Vulkan",
"targets": [ "ggml-vulkan" ],
"configurePreset": "Vulkan"
}
]
}


@@ -1,88 +0,0 @@
# Contributing to Ollama
Thank you for your interest in contributing to Ollama! Here are a few guidelines to help get you started.
## Set up
See the [development documentation](./docs/development.md) for instructions on how to build and run Ollama locally.
### Ideal issues
* [Bugs](https://github.com/ollama/ollama/issues?q=is%3Aissue+is%3Aopen+label%3Abug): issues where Ollama stops working or where it results in an unexpected error.
* [Performance](https://github.com/ollama/ollama/issues?q=is%3Aissue+is%3Aopen+label%3Aperformance): issues to make Ollama faster at model inference, downloading or uploading.
* [Security](https://github.com/ollama/ollama/blob/main/SECURITY.md): issues that could lead to a security vulnerability. As mentioned in [SECURITY.md](https://github.com/ollama/ollama/blob/main/SECURITY.md), please do not disclose security vulnerabilities publicly.
### Issues that are harder to review
* New features: new features (e.g. API fields, environment variables) add surface area to Ollama and make it harder to maintain in the long run as they cannot be removed without potentially breaking users in the future.
* Refactoring: large code improvements are important, but can be harder or take longer to review and merge.
* Documentation: small updates to fill in or correct missing documentation are helpful, however large documentation additions can be hard to maintain over time.
### Issues that may not be accepted
* Changes that break backwards compatibility in Ollama's API (including the OpenAI-compatible API)
* Changes that add significant friction to the user experience
* Changes that create a large future maintenance burden for maintainers and contributors
## Proposing a (non-trivial) change
> By "non-trivial", we mean a change that is not a bug fix or small
> documentation update. If you are unsure, please ask us on our [Discord
> server](https://discord.gg/ollama).
Before opening a non-trivial pull request, please open an issue to discuss the change and
get feedback from the maintainers. This helps us understand the context of the
change and how it fits into Ollama's roadmap, and it keeps us from duplicating
work and you from spending time on a change that we may not be able to accept.
Tips for proposals:
* Explain the problem you are trying to solve, not what you are trying to do.
* Explain why the change is important.
* Explain how the change will be used.
* Explain how the change will be tested.
Additionally, for bonus points: Provide draft documentation you would expect to
see if the changes were accepted.
## Pull requests
**Commit messages**
The title should look like:
<package>: <short description>
The package is the most affected Go package. If the change does not affect Go
code, then use the directory name instead. Changes to a single well-known
file in the root directory may use the file name.
The short description should start with a lowercase letter and be a
continuation of the sentence:
"This changes Ollama to..."
Examples:
llm/backend/mlx: support the llama architecture
CONTRIBUTING: provide clarity on good commit messages, and bad
Bad Examples:
feat: add more emoji
fix: was not using famous web framework
chore: generify code
**Tests**
Please include tests. Strive to test behavior, not implementation.
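As a sketch of what testing behavior (rather than implementation) can look like; this is illustrative only, and `splitModelName` is a stand-in helper, not code from this repository:

```go
package model

import (
	"strings"
	"testing"
)

// splitModelName is a stand-in for this example only: it splits
// "name:tag" and defaults the tag to "latest".
func splitModelName(s string) (name, tag string) {
	if i := strings.IndexByte(s, ':'); i >= 0 {
		return s[:i], s[i+1:]
	}
	return s, "latest"
}

// Assert on observable inputs and outputs, not on which internal
// helpers were called or in what order.
func TestSplitModelName(t *testing.T) {
	cases := []struct{ in, name, tag string }{
		{"gemma3", "gemma3", "latest"},
		{"gemma3:12b", "gemma3", "12b"},
	}
	for _, tc := range cases {
		name, tag := splitModelName(tc.in)
		if name != tc.name || tag != tc.tag {
			t.Errorf("splitModelName(%q) = %q, %q; want %q, %q",
				tc.in, name, tag, tc.name, tc.tag)
		}
	}
}
```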
**New dependencies**
Dependencies should be added sparingly. If you are adding a new dependency,
please explain why it is necessary and what other ways you attempted that
did not work without it.
## Need help?
If you need help with anything, feel free to reach out to us on our [Discord server](https://discord.gg/ollama).


@@ -1,186 +1,15 @@
# vim: filetype=dockerfile
ARG FLAVOR=${TARGETARCH}
ARG PARALLEL=8
ARG ROCMVERSION=6.3.3
ARG JETPACK5VERSION=r35.4.1
ARG JETPACK6VERSION=r36.4.0
ARG CMAKEVERSION=3.31.2
ARG VULKANVERSION=1.4.321.1
# We require gcc v10 minimum. v10.3 has regressions, so we use the Rocky Linux 8.5 AppStream, which has the latest compatible version
FROM --platform=linux/amd64 rocm/dev-almalinux-8:${ROCMVERSION}-complete AS base-amd64
RUN yum install -y yum-utils \
&& yum-config-manager --add-repo https://dl.rockylinux.org/vault/rocky/8.5/AppStream/\$basearch/os/ \
&& rpm --import https://dl.rockylinux.org/pub/rocky/RPM-GPG-KEY-Rocky-8 \
&& dnf install -y yum-utils ccache gcc-toolset-10-gcc-10.2.1-8.2.el8 gcc-toolset-10-gcc-c++-10.2.1-8.2.el8 gcc-toolset-10-binutils-2.35-11.el8 \
&& dnf install -y ccache \
&& yum-config-manager --add-repo https://developer.download.nvidia.com/compute/cuda/repos/rhel8/x86_64/cuda-rhel8.repo
ENV PATH=/opt/rh/gcc-toolset-10/root/usr/bin:$PATH
ARG VULKANVERSION
RUN wget https://sdk.lunarg.com/sdk/download/${VULKANVERSION}/linux/vulkansdk-linux-x86_64-${VULKANVERSION}.tar.xz -O /tmp/vulkansdk-linux-x86_64-${VULKANVERSION}.tar.xz \
&& tar xvf /tmp/vulkansdk-linux-x86_64-${VULKANVERSION}.tar.xz \
&& dnf -y install ninja-build \
&& ln -s /usr/bin/python3 /usr/bin/python \
&& /${VULKANVERSION}/vulkansdk -j 8 vulkan-headers \
&& /${VULKANVERSION}/vulkansdk -j 8 shaderc
RUN cp -r /${VULKANVERSION}/x86_64/include/* /usr/local/include/ \
&& cp -r /${VULKANVERSION}/x86_64/lib/* /usr/local/lib
ENV PATH=/${VULKANVERSION}/x86_64/bin:$PATH
FROM --platform=linux/arm64 almalinux:8 AS base-arm64
# install epel-release for ccache
RUN yum install -y yum-utils epel-release \
&& dnf install -y clang ccache \
&& yum-config-manager --add-repo https://developer.download.nvidia.com/compute/cuda/repos/rhel8/sbsa/cuda-rhel8.repo
ENV CC=clang CXX=clang++
FROM base-${TARGETARCH} AS base
ARG CMAKEVERSION
RUN curl -fsSL https://github.com/Kitware/CMake/releases/download/v${CMAKEVERSION}/cmake-${CMAKEVERSION}-linux-$(uname -m).tar.gz | tar xz -C /usr/local --strip-components 1
ENV LDFLAGS=-s
FROM base AS cpu
RUN dnf install -y gcc-toolset-11-gcc gcc-toolset-11-gcc-c++
ENV PATH=/opt/rh/gcc-toolset-11/root/usr/bin:$PATH
ARG PARALLEL
COPY CMakeLists.txt CMakePresets.json .
COPY ml/backend/ggml/ggml ml/backend/ggml/ggml
RUN --mount=type=cache,target=/root/.ccache \
cmake --preset 'CPU' \
&& cmake --build --parallel ${PARALLEL} --preset 'CPU' \
&& cmake --install build --component CPU --strip --parallel ${PARALLEL}
FROM base AS cuda-11
ARG CUDA11VERSION=11.8
RUN dnf install -y cuda-toolkit-${CUDA11VERSION//./-}
ENV PATH=/usr/local/cuda-11/bin:$PATH
ARG PARALLEL
COPY CMakeLists.txt CMakePresets.json .
COPY ml/backend/ggml/ggml ml/backend/ggml/ggml
RUN --mount=type=cache,target=/root/.ccache \
cmake --preset 'CUDA 11' \
&& cmake --build --parallel ${PARALLEL} --preset 'CUDA 11' \
&& cmake --install build --component CUDA --strip --parallel ${PARALLEL}
FROM base AS cuda-12
ARG CUDA12VERSION=12.8
RUN dnf install -y cuda-toolkit-${CUDA12VERSION//./-}
ENV PATH=/usr/local/cuda-12/bin:$PATH
ARG PARALLEL
COPY CMakeLists.txt CMakePresets.json .
COPY ml/backend/ggml/ggml ml/backend/ggml/ggml
RUN --mount=type=cache,target=/root/.ccache \
cmake --preset 'CUDA 12' \
&& cmake --build --parallel ${PARALLEL} --preset 'CUDA 12' \
&& cmake --install build --component CUDA --strip --parallel ${PARALLEL}
FROM base AS cuda-13
ARG CUDA13VERSION=13.0
RUN dnf install -y cuda-toolkit-${CUDA13VERSION//./-}
ENV PATH=/usr/local/cuda-13/bin:$PATH
ARG PARALLEL
COPY CMakeLists.txt CMakePresets.json .
COPY ml/backend/ggml/ggml ml/backend/ggml/ggml
RUN --mount=type=cache,target=/root/.ccache \
cmake --preset 'CUDA 13' \
&& cmake --build --parallel ${PARALLEL} --preset 'CUDA 13' \
&& cmake --install build --component CUDA --strip --parallel ${PARALLEL}
FROM base AS rocm-6
ENV PATH=/opt/rocm/hcc/bin:/opt/rocm/hip/bin:/opt/rocm/bin:/opt/rocm/hcc/bin:$PATH
ARG PARALLEL
COPY CMakeLists.txt CMakePresets.json .
COPY ml/backend/ggml/ggml ml/backend/ggml/ggml
RUN --mount=type=cache,target=/root/.ccache \
cmake --preset 'ROCm 6' \
&& cmake --build --parallel ${PARALLEL} --preset 'ROCm 6' \
&& cmake --install build --component HIP --strip --parallel ${PARALLEL}
RUN rm -f dist/lib/ollama/rocm/rocblas/library/*gfx90[06]*
FROM --platform=linux/arm64 nvcr.io/nvidia/l4t-jetpack:${JETPACK5VERSION} AS jetpack-5
ARG CMAKEVERSION
RUN apt-get update && apt-get install -y curl ccache \
&& curl -fsSL https://github.com/Kitware/CMake/releases/download/v${CMAKEVERSION}/cmake-${CMAKEVERSION}-linux-$(uname -m).tar.gz | tar xz -C /usr/local --strip-components 1
COPY CMakeLists.txt CMakePresets.json .
COPY ml/backend/ggml/ggml ml/backend/ggml/ggml
ARG PARALLEL
RUN --mount=type=cache,target=/root/.ccache \
cmake --preset 'JetPack 5' \
&& cmake --build --parallel ${PARALLEL} --preset 'JetPack 5' \
&& cmake --install build --component CUDA --strip --parallel ${PARALLEL}
FROM --platform=linux/arm64 nvcr.io/nvidia/l4t-jetpack:${JETPACK6VERSION} AS jetpack-6
ARG CMAKEVERSION
RUN apt-get update && apt-get install -y curl ccache \
&& curl -fsSL https://github.com/Kitware/CMake/releases/download/v${CMAKEVERSION}/cmake-${CMAKEVERSION}-linux-$(uname -m).tar.gz | tar xz -C /usr/local --strip-components 1
COPY CMakeLists.txt CMakePresets.json .
COPY ml/backend/ggml/ggml ml/backend/ggml/ggml
ARG PARALLEL
RUN --mount=type=cache,target=/root/.ccache \
cmake --preset 'JetPack 6' \
&& cmake --build --parallel ${PARALLEL} --preset 'JetPack 6' \
&& cmake --install build --component CUDA --strip --parallel ${PARALLEL}
FROM base AS vulkan
COPY CMakeLists.txt CMakePresets.json .
COPY ml/backend/ggml/ggml ml/backend/ggml/ggml
RUN --mount=type=cache,target=/root/.ccache \
cmake --preset 'Vulkan' \
&& cmake --build --parallel --preset 'Vulkan' \
&& cmake --install build --component Vulkan --strip --parallel 8
FROM base AS build
WORKDIR /go/src/github.com/ollama/ollama
COPY go.mod go.sum .
RUN curl -fsSL https://golang.org/dl/go$(awk '/^go/ { print $2 }' go.mod).linux-$(case $(uname -m) in x86_64) echo amd64 ;; aarch64) echo arm64 ;; esac).tar.gz | tar xz -C /usr/local
ENV PATH=/usr/local/go/bin:$PATH
RUN go mod download
FROM golang:1.20
WORKDIR /go/src/github.com/jmorganca/ollama
COPY . .
ARG GOFLAGS="'-ldflags=-w -s'"
ENV CGO_ENABLED=1
ARG CGO_CFLAGS
ARG CGO_CXXFLAGS
RUN --mount=type=cache,target=/root/.cache/go-build \
go build -trimpath -buildmode=pie -o /bin/ollama .
RUN CGO_ENABLED=1 go build -ldflags '-linkmode external -extldflags "-static"' .
FROM --platform=linux/amd64 scratch AS amd64
# COPY --from=cuda-11 dist/lib/ollama/ /lib/ollama/
COPY --from=cuda-12 dist/lib/ollama /lib/ollama/
COPY --from=cuda-13 dist/lib/ollama /lib/ollama/
COPY --from=vulkan dist/lib/ollama /lib/ollama/
FROM --platform=linux/arm64 scratch AS arm64
# COPY --from=cuda-11 dist/lib/ollama/ /lib/ollama/
COPY --from=cuda-12 dist/lib/ollama /lib/ollama/
COPY --from=cuda-13 dist/lib/ollama/ /lib/ollama/
COPY --from=jetpack-5 dist/lib/ollama/ /lib/ollama/
COPY --from=jetpack-6 dist/lib/ollama/ /lib/ollama/
FROM scratch AS rocm
COPY --from=rocm-6 dist/lib/ollama /lib/ollama
FROM ${FLAVOR} AS archive
ARG VULKANVERSION
COPY --from=cpu dist/lib/ollama /lib/ollama
COPY --from=build /bin/ollama /bin/ollama
FROM ubuntu:24.04
RUN apt-get update \
&& apt-get install -y ca-certificates libvulkan1 \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
COPY --from=archive /bin /usr/bin
ENV PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
COPY --from=archive /lib/ollama /usr/lib/ollama
ENV LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64
ENV NVIDIA_DRIVER_CAPABILITIES=compute,utility
ENV NVIDIA_VISIBLE_DEVICES=all
ENV OLLAMA_HOST=0.0.0.0:11434
FROM alpine
COPY --from=0 /go/src/github.com/jmorganca/ollama/ollama /bin/ollama
EXPOSE 11434
ARG USER=ollama
ARG GROUP=ollama
RUN addgroup -g 1000 $GROUP && adduser -u 1000 -DG $GROUP $USER
USER $USER:$GROUP
ENTRYPOINT ["/bin/ollama"]
ENV OLLAMA_HOST 0.0.0.0
CMD ["serve"]


@@ -1,76 +0,0 @@
UPSTREAM=https://github.com/ggml-org/llama.cpp.git
WORKDIR=llama/vendor
FETCH_HEAD=17f7f4baad8b3a716ee139da7bb56ae984e8c0fa
.PHONY: help
help:
@echo "Available targets:"
@echo " sync Sync with upstream repositories"
@echo " checkout Checkout upstream repository"
@echo " apply-patches Apply patches to local repository"
@echo " format-patches Format patches from local repository"
@echo " clean Clean local repository"
@echo
@echo "Example:"
@echo " make -f $(lastword $(MAKEFILE_LIST)) clean apply-patches sync"
.PHONY: sync
sync: llama/build-info.cpp ml/backend/ggml/ggml/src/ggml-metal/ggml-metal-embed.metal
llama/build-info.cpp: llama/build-info.cpp.in llama/llama.cpp
sed -e 's|@FETCH_HEAD@|$(FETCH_HEAD)|' <$< >$@
ml/backend/ggml/ggml/src/ggml-metal/ggml-metal-embed.metal: ml/backend/ggml/ggml
go generate ./$(@D)
.PHONY: llama/llama.cpp
llama/llama.cpp: llama/vendor
rsync -arvzc --delete -f "include LICENSE" -f "merge $@/.rsync-filter" $(addprefix $<,/LICENSE /) $@
.PHONY: ml/backend/ggml/ggml
ml/backend/ggml/ggml: llama/vendor
rsync -arvzc --delete -f "include LICENSE" -f "merge $@/.rsync-filter" $(addprefix $<,/LICENSE /ggml/) $@
PATCHES=$(wildcard llama/patches/*.patch)
PATCHED=$(join $(dir $(PATCHES)), $(addsuffix ed, $(addprefix ., $(notdir $(PATCHES)))))
.PHONY: apply-patches
.NOTPARALLEL:
apply-patches: $(PATCHED)
llama/patches/.%.patched: llama/patches/%.patch
@if git -c user.name=nobody -c 'user.email=<>' -C $(WORKDIR) am -3 $(realpath $<); then \
touch $@; \
else \
echo "Patch failed. Resolve any conflicts then continue."; \
echo "1. Run 'git -C $(WORKDIR) am --continue'"; \
echo "2. Run 'make -f $(lastword $(MAKEFILE_LIST)) format-patches'"; \
echo "3. Run 'make -f $(lastword $(MAKEFILE_LIST)) clean apply-patches'"; \
exit 1; \
fi
.PHONY: checkout
checkout: $(WORKDIR)
git -C $(WORKDIR) fetch
git -C $(WORKDIR) checkout -f $(FETCH_HEAD)
$(WORKDIR):
git clone $(UPSTREAM) $(WORKDIR)
.PHONY: format-patches
format-patches: llama/patches
git -C $(WORKDIR) format-patch \
--no-signature \
--no-numbered \
--zero-commit \
-o $(realpath $<) \
$(FETCH_HEAD)
.PHONY: clean
clean: checkout
@git -C $(WORKDIR) am --abort || true
$(RM) llama/patches/.*.patched
.PHONY: print-base
print-base:
@echo $(FETCH_HEAD)

README.md

@@ -1,647 +1,108 @@
<div align="center">
  <a href="https://ollama.com">
<img alt="ollama" width="240" src="https://github.com/ollama/ollama/assets/3325447/0d0b44e2-8f4a-4e99-9b52-a5c1c741c8f7">
</a>
</div>
![ollama](https://github.com/jmorganca/ollama/assets/251292/961f99bb-251a-4eec-897d-1ba99997ad0f)
# Ollama
Get up and running with large language models.
Run large language models with `llama.cpp`.
### Features
- Download and run popular large language models
- Switch between multiple models on the fly
- Hardware acceleration where available (Metal, CUDA)
- Fast inference server written in Go, powered by [llama.cpp](https://github.com/ggerganov/llama.cpp)
- REST API to use with your application (python, typescript SDKs coming soon)
## Install
- [Download](https://ollama.ai/download) for macOS with Apple Silicon (Intel coming soon)
- Download for Windows (coming soon)
### macOS
> Note: certain models that can be run with Ollama are intended for research and/or non-commercial use only.
[Download](https://ollama.com/download/Ollama.dmg)
### Windows
[Download](https://ollama.com/download/OllamaSetup.exe)
### Linux
```shell
curl -fsSL https://ollama.com/install.sh | sh
```
[Manual install instructions](https://docs.ollama.com/linux#manual-install)
### Docker
The official [Ollama Docker image](https://hub.docker.com/r/ollama/ollama) `ollama/ollama` is available on Docker Hub.
### Libraries
- [ollama-python](https://github.com/ollama/ollama-python)
- [ollama-js](https://github.com/ollama/ollama-js)
### Community
- [Discord](https://discord.gg/ollama)
- [Reddit](https://reddit.com/r/ollama)
You can also build the [binary from source](#building).
## Quickstart
To run and chat with [Gemma 3](https://ollama.com/library/gemma3):
```shell
ollama run gemma3
```
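Beyond the CLI, Ollama serves a local REST API on port 11434 (the same port exposed in the Dockerfile above). A minimal Go sketch of a non-streaming call to the `/api/generate` endpoint, assuming the server is running locally and using `gemma3` purely as an example model name:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Build a non-streaming generate request.
	body, err := json.Marshal(map[string]any{
		"model":  "gemma3",
		"prompt": "Why is the sky blue?",
		"stream": false,
	})
	if err != nil {
		panic(err)
	}

	resp, err := http.Post("http://localhost:11434/api/generate",
		"application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// With "stream": false the server returns a single JSON object.
	var out struct {
		Response string `json:"response"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		panic(err)
	}
	fmt.Println(out.Response)
}
```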
## Model library
Ollama supports a list of models available on [ollama.com/library](https://ollama.com/library 'ollama model library')
Here are some example models that can be downloaded:
| Model | Parameters | Size | Download |
| ------------------ | ---------- | ----- | -------------------------------- |
| Gemma 3 | 1B | 815MB | `ollama run gemma3:1b` |
| Gemma 3 | 4B | 3.3GB | `ollama run gemma3` |
| Gemma 3 | 12B | 8.1GB | `ollama run gemma3:12b` |
| Gemma 3 | 27B | 17GB | `ollama run gemma3:27b` |
| QwQ | 32B | 20GB | `ollama run qwq` |
| DeepSeek-R1 | 7B | 4.7GB | `ollama run deepseek-r1` |
| DeepSeek-R1 | 671B | 404GB | `ollama run deepseek-r1:671b` |
| Llama 4 | 109B | 67GB | `ollama run llama4:scout` |
| Llama 4 | 400B | 245GB | `ollama run llama4:maverick` |
| Llama 3.3 | 70B | 43GB | `ollama run llama3.3` |
| Llama 3.2 | 3B | 2.0GB | `ollama run llama3.2` |
| Llama 3.2 | 1B | 1.3GB | `ollama run llama3.2:1b` |
| Llama 3.2 Vision | 11B | 7.9GB | `ollama run llama3.2-vision` |
| Llama 3.2 Vision | 90B | 55GB | `ollama run llama3.2-vision:90b` |
| Llama 3.1 | 8B | 4.7GB | `ollama run llama3.1` |
| Llama 3.1 | 405B | 231GB | `ollama run llama3.1:405b` |
| Phi 4 | 14B | 9.1GB | `ollama run phi4` |
| Phi 4 Mini | 3.8B | 2.5GB | `ollama run phi4-mini` |
| Mistral | 7B | 4.1GB | `ollama run mistral` |
| Moondream 2 | 1.4B | 829MB | `ollama run moondream` |
| Neural Chat | 7B | 4.1GB | `ollama run neural-chat` |
| Starling | 7B | 4.1GB | `ollama run starling-lm` |
| Code Llama | 7B | 3.8GB | `ollama run codellama` |
| Llama 2 Uncensored | 7B | 3.8GB | `ollama run llama2-uncensored` |
| LLaVA | 7B | 4.5GB | `ollama run llava` |
| Granite-3.3 | 8B | 4.9GB | `ollama run granite3.3` |
> [!NOTE]
> You should have at least 8 GB of RAM available to run the 7B models, 16 GB to run the 13B models, and 32 GB to run the 33B models.
## Customize a model
### Import from GGUF
Ollama supports importing GGUF models in the Modelfile:
1. Create a file named `Modelfile`, with a `FROM` instruction with the local filepath to the model you want to import.
```
FROM ./vicuna-33b.Q4_0.gguf
```
2. Create the model in Ollama
```shell
ollama create example -f Modelfile
```
3. Run the model
```shell
ollama run example
```
### Import from Safetensors
See the [guide](https://docs.ollama.com/import) on importing models for more information.
### Customize a prompt
Models from the Ollama library can be customized with a prompt. For example, to customize the `llama3.2` model:
```shell
ollama pull llama3.2
```
Create a `Modelfile`:

```
FROM llama3.2

# set the temperature to 1 [higher is more creative, lower is more coherent]
PARAMETER temperature 1

# set the system message
SYSTEM """
You are Mario from Super Mario Bros. Answer as Mario, the assistant, only.
"""
```

Next, create and run the model:

```
ollama create mario -f ./Modelfile
ollama run mario
>>> hi
Hello! It's your friend Mario.
```

For more information on working with a Modelfile, see the [Modelfile](https://docs.ollama.com/modelfile) documentation.
## CLI Reference
### Create a model
`ollama create` is used to create a model from a Modelfile.
```shell
ollama create mymodel -f ./Modelfile
```
### Pull a model
```shell
ollama pull llama3.2
```
> This command can also be used to update a local model. Only the diff will be pulled.
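The incremental pull is also observable from Go. A minimal sketch using `Client.Pull` from this repository's `api` package; the progress callback receives a `ProgressResponse` per status line, so already-present layers show up with `Completed == Total`:

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/ollama/ollama/api"
)

func main() {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		log.Fatal(err)
	}

	// Only layers missing locally are downloaded; up-to-date pulls are cheap.
	req := &api.PullRequest{Model: "llama3.2"}
	err = client.Pull(context.Background(), req, func(p api.ProgressResponse) error {
		fmt.Printf("%s: %d/%d bytes\n", p.Status, p.Completed, p.Total)
		return nil
	})
	if err != nil {
		log.Fatal(err)
	}
}
```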
### Remove a model
```shell
ollama rm llama3.2
```
### Copy a model
```shell
ollama cp llama3.2 my-model
```
### Multiline input
For multiline input, you can wrap text with `"""`:
```
>>> """Hello,
... world!
... """
I'm a basic program that prints the famous "Hello, world!" message to the console.
```
### Multimodal models

```
ollama run llava "What's in this image? /Users/jmorgan/Desktop/smile.png"
```

> **Output**: The image features a yellow smiley face, which is likely the central focus of the picture.

### Pass the prompt as an argument

```shell
ollama run llama3.2 "Summarize this file: $(cat README.md)"
```

> **Output**: Ollama is a lightweight, extensible framework for building and running language models on the local machine. It provides a simple API for creating, running, and managing models, as well as a library of pre-built models that can be easily used in a variety of applications.
### Show model information

```shell
ollama show llama3.2
```

### List models on your computer

```shell
ollama list
```
### List which models are currently loaded
```shell
ollama ps
```
### Stop a model which is currently running
```shell
ollama stop llama3.2
```
### Generate embeddings from the CLI
```shell
ollama run embeddinggemma "Your text to embed"
```
You can also pipe text for scripted workflows:
```shell
echo "Your text to embed" | ollama run embeddinggemma
```
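The same embeddings are available through `Client.Embed` in the `api` package. A sketch, assuming the `EmbedRequest.Input`/`EmbedResponse.Embeddings` shapes described in [the API documentation](./docs/api.md):

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/ollama/ollama/api"
)

func main() {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		log.Fatal(err)
	}

	resp, err := client.Embed(context.Background(), &api.EmbedRequest{
		Model: "embeddinggemma",
		Input: "Your text to embed", // Input also accepts []string for batching
	})
	if err != nil {
		log.Fatal(err)
	}

	// One vector per input; print how many came back and their dimensionality.
	fmt.Println(len(resp.Embeddings), "embedding(s) of dimension", len(resp.Embeddings[0]))
}
```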
### Start Ollama

`ollama serve` is used when you want to start ollama without running the desktop application.

## Building

See the [developer guide](https://github.com/ollama/ollama/blob/main/docs/development.md)

### Running local builds

Next, start the server:

```shell
./ollama serve
```

Finally, in a separate shell, run a model:

```shell
./ollama run llama3.2
```

## REST API

Ollama has a REST API for running and managing models.

### Generate a response

```shell
curl http://localhost:11434/api/generate -d '{
  "model": "llama3.2",
  "prompt":"Why is the sky blue?"
}'
```
### Chat with a model

```shell
curl http://localhost:11434/api/chat -d '{
  "model": "llama3.2",
  "messages": [
    { "role": "user", "content": "why is the sky blue?" }
  ]
}'
```
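The same chat request from Go, via `Client.Chat` in this repository's `api` package (a minimal sketch; the callback fires once per streamed NDJSON chunk):

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/ollama/ollama/api"
)

func main() {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		log.Fatal(err)
	}

	req := &api.ChatRequest{
		Model: "llama3.2",
		Messages: []api.Message{
			{Role: "user", Content: "why is the sky blue?"},
		},
	}

	err = client.Chat(context.Background(), req, func(resp api.ChatResponse) error {
		fmt.Print(resp.Message.Content)
		return nil
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println()
}
```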
See the [API documentation](./docs/api.md) for all endpoints.
## Community Integrations
### Web & Desktop
- [Open WebUI](https://github.com/open-webui/open-webui)
- [SwiftChat (macOS with ReactNative)](https://github.com/aws-samples/swift-chat)
- [Enchanted (macOS native)](https://github.com/AugustDev/enchanted)
- [Hollama](https://github.com/fmaclen/hollama)
- [Lollms WebUI (Single user)](https://github.com/ParisNeo/lollms-webui)
- [Lollms (Multi users)](https://github.com/ParisNeo/lollms)
- [LibreChat](https://github.com/danny-avila/LibreChat)
- [Bionic GPT](https://github.com/bionic-gpt/bionic-gpt)
- [HTML UI](https://github.com/rtcfirefly/ollama-ui)
- [AI-UI](https://github.com/bajahaw/ai-ui)
- [Saddle](https://github.com/jikkuatwork/saddle)
- [TagSpaces](https://www.tagspaces.org) (A platform for file-based apps, [utilizing Ollama](https://docs.tagspaces.org/ai/) for the generation of tags and descriptions)
- [Chatbot UI](https://github.com/ivanfioravanti/chatbot-ollama)
- [Chatbot UI v2](https://github.com/mckaywrigley/chatbot-ui)
- [Typescript UI](https://github.com/ollama-interface/Ollama-Gui?tab=readme-ov-file)
- [Minimalistic React UI for Ollama Models](https://github.com/richawo/minimal-llm-ui)
- [Ollamac](https://github.com/kevinhermawan/Ollamac)
- [big-AGI](https://github.com/enricoros/big-AGI)
- [Cheshire Cat assistant framework](https://github.com/cheshire-cat-ai/core)
- [Amica](https://github.com/semperai/amica)
- [chatd](https://github.com/BruceMacD/chatd)
- [Ollama-SwiftUI](https://github.com/kghandour/Ollama-SwiftUI)
- [Dify.AI](https://github.com/langgenius/dify)
- [MindMac](https://mindmac.app)
- [NextJS Web Interface for Ollama](https://github.com/jakobhoeg/nextjs-ollama-llm-ui)
- [Msty](https://msty.app)
- [Chatbox](https://github.com/Bin-Huang/Chatbox)
- [WinForm Ollama Copilot](https://github.com/tgraupmann/WinForm_Ollama_Copilot)
- [NextChat](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web) with [Get Started Doc](https://docs.nextchat.dev/models/ollama)
- [Alpaca WebUI](https://github.com/mmo80/alpaca-webui)
- [OllamaGUI](https://github.com/enoch1118/ollamaGUI)
- [OpenAOE](https://github.com/InternLM/OpenAOE)
- [Odin Runes](https://github.com/leonid20000/OdinRunes)
- [LLM-X](https://github.com/mrdjohnson/llm-x) (Progressive Web App)
- [AnythingLLM (Docker + MacOs/Windows/Linux native app)](https://github.com/Mintplex-Labs/anything-llm)
- [Ollama Basic Chat: Uses HyperDiv Reactive UI](https://github.com/rapidarchitect/ollama_basic_chat)
- [Ollama-chats RPG](https://github.com/drazdra/ollama-chats)
- [IntelliBar](https://intellibar.app/) (AI-powered assistant for macOS)
- [Jirapt](https://github.com/AliAhmedNada/jirapt) (Jira Integration to generate issues, tasks, epics)
- [ojira](https://github.com/AliAhmedNada/ojira) (Jira chrome plugin to easily generate descriptions for tasks)
- [QA-Pilot](https://github.com/reid41/QA-Pilot) (Interactive chat tool that can leverage Ollama models for rapid understanding and navigation of GitHub code repositories)
- [ChatOllama](https://github.com/sugarforever/chat-ollama) (Open Source Chatbot based on Ollama with Knowledge Bases)
- [CRAG Ollama Chat](https://github.com/Nagi-ovo/CRAG-Ollama-Chat) (Simple Web Search with Corrective RAG)
- [RAGFlow](https://github.com/infiniflow/ragflow) (Open-source Retrieval-Augmented Generation engine based on deep document understanding)
- [StreamDeploy](https://github.com/StreamDeploy-DevRel/streamdeploy-llm-app-scaffold) (LLM Application Scaffold)
- [chat](https://github.com/swuecho/chat) (chat web app for teams)
- [Lobe Chat](https://github.com/lobehub/lobe-chat) with [Integrating Doc](https://lobehub.com/docs/self-hosting/examples/ollama)
- [Ollama RAG Chatbot](https://github.com/datvodinh/rag-chatbot.git) (Local Chat with multiple PDFs using Ollama and RAG)
- [BrainSoup](https://www.nurgo-software.com/products/brainsoup) (Flexible native client with RAG & multi-agent automation)
- [macai](https://github.com/Renset/macai) (macOS client for Ollama, ChatGPT, and other compatible API back-ends)
- [RWKV-Runner](https://github.com/josStorer/RWKV-Runner) (RWKV offline LLM deployment tool, also usable as a client for ChatGPT and Ollama)
- [Ollama Grid Search](https://github.com/dezoito/ollama-grid-search) (app to evaluate and compare models)
- [Olpaka](https://github.com/Otacon/olpaka) (User-friendly Flutter Web App for Ollama)
- [Casibase](https://casibase.org) (An open source AI knowledge base and dialogue system combining the latest RAG, SSO, ollama support, and multiple large language models.)
- [OllamaSpring](https://github.com/CrazyNeil/OllamaSpring) (Ollama Client for macOS)
- [LLocal.in](https://github.com/kartikm7/llocal) (Easy to use Electron Desktop Client for Ollama)
- [Shinkai Desktop](https://github.com/dcSpark/shinkai-apps) (Two click install Local AI using Ollama + Files + RAG)
- [AiLama](https://github.com/zeyoyt/ailama) (A Discord User App that allows you to interact with Ollama anywhere in Discord)
- [Ollama with Google Mesop](https://github.com/rapidarchitect/ollama_mesop/) (Mesop Chat Client implementation with Ollama)
- [R2R](https://github.com/SciPhi-AI/R2R) (Open-source RAG engine)
- [Ollama-Kis](https://github.com/elearningshow/ollama-kis) (A simple easy-to-use GUI with sample custom LLM for Drivers Education)
- [OpenGPA](https://opengpa.org) (Open-source offline-first Enterprise Agentic Application)
- [Painting Droid](https://github.com/mateuszmigas/painting-droid) (Painting app with AI integrations)
- [Kerlig AI](https://www.kerlig.com/) (AI writing assistant for macOS)
- [AI Studio](https://github.com/MindWorkAI/AI-Studio)
- [Sidellama](https://github.com/gyopak/sidellama) (browser-based LLM client)
- [LLMStack](https://github.com/trypromptly/LLMStack) (No-code multi-agent framework to build LLM agents and workflows)
- [BoltAI for Mac](https://boltai.com) (AI Chat Client for Mac)
- [Harbor](https://github.com/av/harbor) (Containerized LLM Toolkit with Ollama as default backend)
- [PyGPT](https://github.com/szczyglis-dev/py-gpt) (AI desktop assistant for Linux, Windows, and Mac)
- [Alpaca](https://github.com/Jeffser/Alpaca) (An Ollama client application for Linux and macOS made with GTK4 and Adwaita)
- [AutoGPT](https://github.com/Significant-Gravitas/AutoGPT/blob/master/docs/content/platform/ollama.md) (AutoGPT Ollama integration)
- [Go-CREW](https://www.jonathanhecl.com/go-crew/) (Powerful Offline RAG in Golang)
- [PartCAD](https://github.com/openvmp/partcad/) (CAD model generation with OpenSCAD and CadQuery)
- [Ollama4j Web UI](https://github.com/ollama4j/ollama4j-web-ui) - Java-based Web UI for Ollama built with Vaadin, Spring Boot, and Ollama4j
- [PyOllaMx](https://github.com/kspviswa/pyOllaMx) - macOS application capable of chatting with both Ollama and Apple MLX models.
- [Cline](https://github.com/cline/cline) - Formerly known as Claude Dev is a VS Code extension for multi-file/whole-repo coding
- [Void](https://github.com/voideditor/void) (Open source AI code editor and Cursor alternative)
- [Cherry Studio](https://github.com/kangfenmao/cherry-studio) (Desktop client with Ollama support)
- [ConfiChat](https://github.com/1runeberg/confichat) (Lightweight, standalone, multi-platform, and privacy-focused LLM chat interface with optional encryption)
- [Archyve](https://github.com/nickthecook/archyve) (RAG-enabling document library)
- [crewAI with Mesop](https://github.com/rapidarchitect/ollama-crew-mesop) (Mesop Web Interface to run crewAI with Ollama)
- [Tkinter-based client](https://github.com/chyok/ollama-gui) (Python tkinter-based Client for Ollama)
- [LLMChat](https://github.com/trendy-design/llmchat) (Privacy focused, 100% local, intuitive all-in-one chat interface)
- [Local Multimodal AI Chat](https://github.com/Leon-Sander/Local-Multimodal-AI-Chat) (Ollama-based LLM Chat with support for multiple features, including PDF RAG, voice chat, image-based interactions, and integration with OpenAI.)
- [ARGO](https://github.com/xark-argo/argo) (Locally download and run Ollama and Huggingface models with RAG and deep research on Mac/Windows/Linux)
- [OrionChat](https://github.com/EliasPereirah/OrionChat) - OrionChat is a web interface for chatting with different AI providers
- [G1](https://github.com/bklieger-groq/g1) (Prototype of using prompting strategies to improve the LLM's reasoning through o1-like reasoning chains.)
- [Web management](https://github.com/lemonit-eric-mao/ollama-web-management) (Web management page)
- [Promptery](https://github.com/promptery/promptery) (desktop client for Ollama.)
- [Ollama App](https://github.com/JHubi1/ollama-app) (Modern and easy-to-use multi-platform client for Ollama)
- [chat-ollama](https://github.com/annilq/chat-ollama) (a React Native client for Ollama)
- [SpaceLlama](https://github.com/tcsenpai/spacellama) (Firefox and Chrome extension to quickly summarize web pages with ollama in a sidebar)
- [YouLama](https://github.com/tcsenpai/youlama) (Webapp to quickly summarize any YouTube video, supporting Invidious as well)
- [DualMind](https://github.com/tcsenpai/dualmind) (Experimental app allowing two models to talk to each other in the terminal or in a web interface)
- [ollamarama-matrix](https://github.com/h1ddenpr0cess20/ollamarama-matrix) (Ollama chatbot for the Matrix chat protocol)
- [ollama-chat-app](https://github.com/anan1213095357/ollama-chat-app) (Flutter-based chat app)
- [Perfect Memory AI](https://www.perfectmemory.ai/) (Productivity AI assists personalized by what you have seen on your screen, heard, and said in the meetings)
- [Hexabot](https://github.com/hexastack/hexabot) (A conversational AI builder)
- [Reddit Rate](https://github.com/rapidarchitect/reddit_analyzer) (Search and Rate Reddit topics with a weighted summation)
- [OpenTalkGpt](https://github.com/adarshM84/OpenTalkGpt) (Chrome Extension to manage open-source models supported by Ollama, create custom models, and chat with models from a user-friendly UI)
- [VT](https://github.com/vinhnx/vt.ai) (A minimal multimodal AI chat app, with dynamic conversation routing. Supports local models via Ollama)
- [Nosia](https://github.com/nosia-ai/nosia) (Easy to install and use RAG platform based on Ollama)
- [Witsy](https://github.com/nbonamy/witsy) (An AI Desktop application available for Mac/Windows/Linux)
- [Abbey](https://github.com/US-Artificial-Intelligence/abbey) (A configurable AI interface server with notebooks, document storage, and YouTube support)
- [Minima](https://github.com/dmayboroda/minima) (RAG with on-premises or fully local workflow)
- [aidful-ollama-model-delete](https://github.com/AidfulAI/aidful-ollama-model-delete) (User interface for simplified model cleanup)
- [Perplexica](https://github.com/ItzCrazyKns/Perplexica) (An AI-powered search engine & an open-source alternative to Perplexity AI)
- [Ollama Chat WebUI for Docker ](https://github.com/oslook/ollama-webui) (Support for local docker deployment, lightweight ollama webui)
- [AI Toolkit for Visual Studio Code](https://aka.ms/ai-tooklit/ollama-docs) (Microsoft-official VS Code extension to chat, test, evaluate models with Ollama support, and use them in your AI applications.)
- [MinimalNextOllamaChat](https://github.com/anilkay/MinimalNextOllamaChat) (Minimal Web UI for Chat and Model Control)
- [Chipper](https://github.com/TilmanGriesel/chipper) AI interface for tinkerers (Ollama, Haystack RAG, Python)
- [ChibiChat](https://github.com/CosmicEventHorizon/ChibiChat) (Kotlin-based Android app to chat with Ollama and Koboldcpp API endpoints)
- [LocalLLM](https://github.com/qusaismael/localllm) (Minimal Web-App to run ollama models on it with a GUI)
- [Ollamazing](https://github.com/buiducnhat/ollamazing) (Web extension to run Ollama models)
- [OpenDeepResearcher-via-searxng](https://github.com/benhaotang/OpenDeepResearcher-via-searxng) (A Deep Research equivalent endpoint with Ollama support for running locally)
- [AntSK](https://github.com/AIDotNet/AntSK) (Out-of-the-box & Adaptable RAG Chatbot)
- [MaxKB](https://github.com/1Panel-dev/MaxKB/) (Ready-to-use & flexible RAG Chatbot)
- [yla](https://github.com/danielekp/yla) (Web interface to freely interact with your customized models)
- [LangBot](https://github.com/RockChinQ/LangBot) (LLM-based instant messaging bots platform, with Agents, RAG features, supports multiple platforms)
- [1Panel](https://github.com/1Panel-dev/1Panel/) (Web-based Linux Server Management Tool)
- [AstrBot](https://github.com/Soulter/AstrBot/) (User-friendly LLM-based multi-platform chatbot with a WebUI, supporting RAG, LLM agents, and plugins integration)
- [Reins](https://github.com/ibrahimcetin/reins) (Easily tweak parameters, customize system prompts per chat, and enhance your AI experiments with reasoning model support.)
- [Flufy](https://github.com/Aharon-Bensadoun/Flufy) (A beautiful chat interface for interacting with Ollama's API. Built with React, TypeScript, and Material-UI.)
- [Ellama](https://github.com/zeozeozeo/ellama) (Friendly native app to chat with an Ollama instance)
- [screenpipe](https://github.com/mediar-ai/screenpipe) Build agents powered by your screen history
- [Ollamb](https://github.com/hengkysteen/ollamb) (Simple yet rich in features, cross-platform built with Flutter and designed for Ollama. Try the [web demo](https://hengkysteen.github.io/demo/ollamb/).)
- [Writeopia](https://github.com/Writeopia/Writeopia) (Text editor with integration with Ollama)
- [AppFlowy](https://github.com/AppFlowy-IO/AppFlowy) (AI collaborative workspace with Ollama, cross-platform and self-hostable)
- [Lumina](https://github.com/cushydigit/lumina.git) (A lightweight, minimal React.js frontend for interacting with Ollama servers)
- [Tiny Notepad](https://pypi.org/project/tiny-notepad) (A lightweight, notepad-like interface to chat with ollama available on PyPI)
- [macLlama (macOS native)](https://github.com/hellotunamayo/macLlama) (A native macOS GUI application for interacting with Ollama models, featuring a chat interface.)
- [GPTranslate](https://github.com/philberndt/GPTranslate) (A fast and lightweight, AI powered desktop translation application written with Rust and Tauri. Features real-time translation with OpenAI/Azure/Ollama.)
- [ollama launcher](https://github.com/NGC13009/ollama-launcher) (A launcher for Ollama, aiming to provide users with convenient functions such as ollama server launching, management, or configuration.)
- [ai-hub](https://github.com/Aj-Seven/ai-hub) (AI Hub supports multiple models via API keys and Chat support via Ollama API.)
- [Mayan EDMS](https://gitlab.com/mayan-edms/mayan-edms) (Open source document management system to organize, tag, search, and automate your files with powerful Ollama driven workflows.)
- [Serene Pub](https://github.com/doolijb/serene-pub) (Beginner friendly, open source AI Roleplaying App for Windows, Mac OS and Linux. Search, download and use models with Ollama all inside the app.)
- [Andes](https://github.com/aqerd/andes) (A Visual Studio Code extension that provides a local UI interface for Ollama models)
- [KDeps](https://github.com/kdeps/kdeps) (Kdeps is an offline-first AI framework for building Dockerized full-stack AI applications declaratively using Apple PKL and integrates APIs with Ollama on the backend.)
- [Clueless](https://github.com/KashyapTan/clueless) (Open Source & Local Cluely: A desktop application LLM assistant to help you talk to anything on your screen using locally served Ollama models. Also undetectable to screenshare)
- [ollama-co2](https://github.com/carbonatedWaterOrg/ollama-co2) (FastAPI web interface for monitoring and managing local and remote Ollama servers with real-time model monitoring and concurrent downloads)
- [Hillnote](https://hillnote.com) (A Markdown-first workspace designed to supercharge your AI workflow. Create documents ready to integrate with Claude, ChatGPT, Gemini, Cursor, and more - all while keeping your work on your device.)
### Cloud
- [Google Cloud](https://cloud.google.com/run/docs/tutorials/gpu-gemma2-with-ollama)
- [Fly.io](https://fly.io/docs/python/do-more/add-ollama/)
- [Koyeb](https://www.koyeb.com/deploy/ollama)
### Tutorial
- [handy-ollama](https://github.com/datawhalechina/handy-ollama) (Chinese Tutorial for Ollama by [Datawhale ](https://github.com/datawhalechina) - China's Largest Open Source AI Learning Community)
### Terminal
- [oterm](https://github.com/ggozad/oterm)
- [Ellama Emacs client](https://github.com/s-kostyaev/ellama)
- [Emacs client](https://github.com/zweifisch/ollama)
- [neollama](https://github.com/paradoxical-dev/neollama) UI client for interacting with models from within Neovim
- [gen.nvim](https://github.com/David-Kunz/gen.nvim)
- [ollama.nvim](https://github.com/nomnivore/ollama.nvim)
- [ollero.nvim](https://github.com/marco-souza/ollero.nvim)
- [ollama-chat.nvim](https://github.com/gerazov/ollama-chat.nvim)
- [ogpt.nvim](https://github.com/huynle/ogpt.nvim)
- [gptel Emacs client](https://github.com/karthink/gptel)
- [Oatmeal](https://github.com/dustinblackman/oatmeal)
- [cmdh](https://github.com/pgibler/cmdh)
- [ooo](https://github.com/npahlfer/ooo)
- [shell-pilot](https://github.com/reid41/shell-pilot) (Interact with models via pure shell scripts on Linux or macOS)
- [tenere](https://github.com/pythops/tenere)
- [llm-ollama](https://github.com/taketwo/llm-ollama) for [Datasette's LLM CLI](https://llm.datasette.io/en/stable/).
- [typechat-cli](https://github.com/anaisbetts/typechat-cli)
- [ShellOracle](https://github.com/djcopley/ShellOracle)
- [tlm](https://github.com/yusufcanb/tlm)
- [podman-ollama](https://github.com/ericcurtin/podman-ollama)
- [gollama](https://github.com/sammcj/gollama)
- [ParLlama](https://github.com/paulrobello/parllama)
- [Ollama eBook Summary](https://github.com/cognitivetech/ollama-ebook-summary/)
- [Ollama Mixture of Experts (MOE) in 50 lines of code](https://github.com/rapidarchitect/ollama_moe)
- [vim-intelligence-bridge](https://github.com/pepo-ec/vim-intelligence-bridge) Simple interaction of "Ollama" with the Vim editor
- [x-cmd ollama](https://x-cmd.com/mod/ollama)
- [bb7](https://github.com/drunkwcodes/bb7)
- [SwollamaCLI](https://github.com/marcusziade/Swollama) bundled with the Swollama Swift package. [Demo](https://github.com/marcusziade/Swollama?tab=readme-ov-file#cli-usage)
- [aichat](https://github.com/sigoden/aichat) All-in-one LLM CLI tool featuring Shell Assistant, Chat-REPL, RAG, AI tools & agents, with access to OpenAI, Claude, Gemini, Ollama, Groq, and more.
- [PowershAI](https://github.com/rrg92/powershai) PowerShell module that brings AI to terminal on Windows, including support for Ollama
- [DeepShell](https://github.com/Abyss-c0re/deepshell) Your self-hosted AI assistant. Interactive Shell, Files and Folders analysis.
- [orbiton](https://github.com/xyproto/orbiton) Configuration-free text editor and IDE with support for tab completion with Ollama.
- [orca-cli](https://github.com/molbal/orca-cli) Ollama Registry CLI Application - Browse, pull, and download models from Ollama Registry in your terminal.
- [GGUF-to-Ollama](https://github.com/jonathanhecl/gguf-to-ollama) - Importing GGUF to Ollama made easy (multiplatform)
- [AWS-Strands-With-Ollama](https://github.com/rapidarchitect/ollama_strands) - AWS Strands Agents with Ollama Examples
- [ollama-multirun](https://github.com/attogram/ollama-multirun) - A bash shell script to run a single prompt against any or all of your locally installed ollama models, saving the output and performance statistics as easily navigable web pages. ([Demo](https://attogram.github.io/ai_test_zone/))
- [ollama-bash-toolshed](https://github.com/attogram/ollama-bash-toolshed) - Bash scripts to chat with tool using models. Add new tools to your shed with ease. Runs on Ollama.
- [hle-eval-ollama](https://github.com/mags0ft/hle-eval-ollama) - Runs benchmarks like "Humanity's Last Exam" (HLE) on your favorite local Ollama models and evaluates the quality of their responses
- [VT Code](https://github.com/vinhnx/vtcode) - VT Code is a Rust-based terminal coding agent with semantic code intelligence via Tree-sitter. Ollama integration for running local/cloud models with configurable endpoints.
### Apple Vision Pro
- [SwiftChat](https://github.com/aws-samples/swift-chat) (Cross-platform AI chat app supporting Apple Vision Pro via "Designed for iPad")
- [Enchanted](https://github.com/AugustDev/enchanted)
### Database
- [pgai](https://github.com/timescale/pgai) - PostgreSQL as a vector database (Create and search embeddings from Ollama models using pgvector)
- [Get started guide](https://github.com/timescale/pgai/blob/main/docs/vectorizer-quick-start.md)
- [MindsDB](https://github.com/mindsdb/mindsdb/blob/staging/mindsdb/integrations/handlers/ollama_handler/README.md) (Connects Ollama models with nearly 200 data platforms and apps)
- [chromem-go](https://github.com/philippgille/chromem-go/blob/v0.5.0/embed_ollama.go) with [example](https://github.com/philippgille/chromem-go/tree/v0.5.0/examples/rag-wikipedia-ollama)
- [Kangaroo](https://github.com/dbkangaroo/kangaroo) (AI-powered SQL client and admin tool for popular databases)
### Package managers
- [Pacman](https://archlinux.org/packages/extra/x86_64/ollama/)
- [Gentoo](https://github.com/gentoo/guru/tree/master/app-misc/ollama)
- [Homebrew](https://formulae.brew.sh/formula/ollama)
- [Helm Chart](https://artifacthub.io/packages/helm/ollama-helm/ollama)
- [Guix channel](https://codeberg.org/tusharhero/ollama-guix)
- [Nix package](https://search.nixos.org/packages?show=ollama&from=0&size=50&sort=relevance&type=packages&query=ollama)
- [Flox](https://flox.dev/blog/ollama-part-one)
### Libraries
- [LangChain](https://python.langchain.com/docs/integrations/chat/ollama/) and [LangChain.js](https://js.langchain.com/docs/integrations/chat/ollama/) with [example](https://js.langchain.com/docs/tutorials/local_rag/)
- [Firebase Genkit](https://firebase.google.com/docs/genkit/plugins/ollama)
- [crewAI](https://github.com/crewAIInc/crewAI)
- [Yacana](https://remembersoftwares.github.io/yacana/) (User-friendly multi-agent framework for brainstorming and executing predetermined flows with built-in tool integration)
- [Strands Agents](https://github.com/strands-agents/sdk-python) (A model-driven approach to building AI agents in just a few lines of code)
- [Spring AI](https://github.com/spring-projects/spring-ai) with [reference](https://docs.spring.io/spring-ai/reference/api/chat/ollama-chat.html) and [example](https://github.com/tzolov/ollama-tools)
- [LangChainGo](https://github.com/tmc/langchaingo/) with [example](https://github.com/tmc/langchaingo/tree/main/examples/ollama-completion-example)
- [LangChain4j](https://github.com/langchain4j/langchain4j) with [example](https://github.com/langchain4j/langchain4j-examples/tree/main/ollama-examples/src/main/java)
- [LangChainRust](https://github.com/Abraxas-365/langchain-rust) with [example](https://github.com/Abraxas-365/langchain-rust/blob/main/examples/llm_ollama.rs)
- [LangChain for .NET](https://github.com/tryAGI/LangChain) with [example](https://github.com/tryAGI/LangChain/blob/main/examples/LangChain.Samples.OpenAI/Program.cs)
- [LLPhant](https://github.com/theodo-group/LLPhant?tab=readme-ov-file#ollama)
- [LlamaIndex](https://docs.llamaindex.ai/en/stable/examples/llm/ollama/) and [LlamaIndexTS](https://ts.llamaindex.ai/modules/llms/available_llms/ollama)
- [LiteLLM](https://github.com/BerriAI/litellm)
- [OllamaFarm for Go](https://github.com/presbrey/ollamafarm)
- [OllamaSharp for .NET](https://github.com/awaescher/OllamaSharp)
- [Ollama for Ruby](https://github.com/gbaptista/ollama-ai)
- [Ollama-rs for Rust](https://github.com/pepperoni21/ollama-rs)
- [Ollama-hpp for C++](https://github.com/jmont-dev/ollama-hpp)
- [Ollama4j for Java](https://github.com/ollama4j/ollama4j)
- [ModelFusion Typescript Library](https://modelfusion.dev/integration/model-provider/ollama)
- [OllamaKit for Swift](https://github.com/kevinhermawan/OllamaKit)
- [Ollama for Dart](https://github.com/breitburg/dart-ollama)
- [Ollama for Laravel](https://github.com/cloudstudio/ollama-laravel)
- [LangChainDart](https://github.com/davidmigloz/langchain_dart)
- [Semantic Kernel - Python](https://github.com/microsoft/semantic-kernel/tree/main/python/semantic_kernel/connectors/ai/ollama)
- [Haystack](https://github.com/deepset-ai/haystack-integrations/blob/main/integrations/ollama.md)
- [Elixir LangChain](https://github.com/brainlid/langchain)
- [Ollama for R - rollama](https://github.com/JBGruber/rollama)
- [Ollama for R - ollama-r](https://github.com/hauselin/ollama-r)
- [Ollama-ex for Elixir](https://github.com/lebrunel/ollama-ex)
- [Ollama Connector for SAP ABAP](https://github.com/b-tocs/abap_btocs_ollama)
- [Testcontainers](https://testcontainers.com/modules/ollama/)
- [Portkey](https://portkey.ai/docs/welcome/integration-guides/ollama)
- [PromptingTools.jl](https://github.com/svilupp/PromptingTools.jl) with an [example](https://svilupp.github.io/PromptingTools.jl/dev/examples/working_with_ollama)
- [LlamaScript](https://github.com/Project-Llama/llamascript)
- [llm-axe](https://github.com/emirsahin1/llm-axe) (Python Toolkit for Building LLM Powered Apps)
- [Gollm](https://docs.gollm.co/examples/ollama-example)
- [Gollama for Golang](https://github.com/jonathanhecl/gollama)
- [Ollamaclient for Golang](https://github.com/xyproto/ollamaclient)
- [High-level function abstraction in Go](https://gitlab.com/tozd/go/fun)
- [Ollama PHP](https://github.com/ArdaGnsrn/ollama-php)
- [Agents-Flex for Java](https://github.com/agents-flex/agents-flex) with [example](https://github.com/agents-flex/agents-flex/tree/main/agents-flex-llm/agents-flex-llm-ollama/src/test/java/com/agentsflex/llm/ollama)
- [Parakeet](https://github.com/parakeet-nest/parakeet) is a GoLang library, made to simplify the development of small generative AI applications with Ollama.
- [Haverscript](https://github.com/andygill/haverscript) with [examples](https://github.com/andygill/haverscript/tree/main/examples)
- [Ollama for Swift](https://github.com/mattt/ollama-swift)
- [Swollama for Swift](https://github.com/guitaripod/Swollama) with [DocC](https://guitaripod.github.io/Swollama/documentation/swollama)
- [GoLamify](https://github.com/prasad89/golamify)
- [Ollama for Haskell](https://github.com/tusharad/ollama-haskell)
- [multi-llm-ts](https://github.com/nbonamy/multi-llm-ts) (A Typescript/JavaScript library allowing access to different LLM in a unified API)
- [LlmTornado](https://github.com/lofcz/llmtornado) (C# library providing a unified interface for major FOSS & Commercial inference APIs)
- [Ollama for Zig](https://github.com/dravenk/ollama-zig)
- [Abso](https://github.com/lunary-ai/abso) (OpenAI-compatible TypeScript SDK for any LLM provider)
- [Nichey](https://github.com/goodreasonai/nichey) is a Python package for generating custom wikis for your research topic
- [Ollama for D](https://github.com/kassane/ollama-d)
- [OllamaPlusPlus](https://github.com/HardCodeDev777/OllamaPlusPlus) (Very simple C++ library for Ollama)
- [any-llm](https://github.com/mozilla-ai/any-llm) (A single interface to use different llm providers by [mozilla.ai](https://www.mozilla.ai/))
- [any-agent](https://github.com/mozilla-ai/any-agent) (A single interface to use and evaluate different agent frameworks by [mozilla.ai](https://www.mozilla.ai/))
- [Neuro SAN](https://github.com/cognizant-ai-lab/neuro-san-studio) (Data-driven multi-agent orchestration framework) with [example](https://github.com/cognizant-ai-lab/neuro-san-studio/blob/main/docs/user_guide.md#ollama)
- [achatbot-go](https://github.com/ai-bot-pro/achatbot-go) a multimodal(text/audio/image) chatbot.
- [Ollama Bash Lib](https://github.com/attogram/ollama-bash-lib) - A Bash Library for Ollama. Run LLM prompts straight from your shell, and more
### Mobile
- [SwiftChat](https://github.com/aws-samples/swift-chat) (Lightning-fast Cross-platform AI chat app with native UI for Android, iOS, and iPad)
- [Enchanted](https://github.com/AugustDev/enchanted)
- [Maid](https://github.com/Mobile-Artificial-Intelligence/maid)
- [Ollama App](https://github.com/JHubi1/ollama-app) (Modern and easy-to-use multi-platform client for Ollama)
- [ConfiChat](https://github.com/1runeberg/confichat) (Lightweight, standalone, multi-platform, and privacy-focused LLM chat interface with optional encryption)
- [Ollama Android Chat](https://github.com/sunshine0523/OllamaServer) (No need for Termux, start the Ollama service with one click on an Android device)
- [Reins](https://github.com/ibrahimcetin/reins) (Easily tweak parameters, customize system prompts per chat, and enhance your AI experiments with reasoning model support.)
### Extensions & Plugins
- [Raycast extension](https://github.com/MassimilianoPasquini97/raycast_ollama)
- [Discollama](https://github.com/mxyng/discollama) (Discord bot inside the Ollama discord channel)
- [Continue](https://github.com/continuedev/continue)
- [Vibe](https://github.com/thewh1teagle/vibe) (Transcribe and analyze meetings with Ollama)
- [Obsidian Ollama plugin](https://github.com/hinterdupfinger/obsidian-ollama)
- [Logseq Ollama plugin](https://github.com/omagdy7/ollama-logseq)
- [NotesOllama](https://github.com/andersrex/notesollama) (Apple Notes Ollama plugin)
- [Dagger Chatbot](https://github.com/samalba/dagger-chatbot)
- [Discord AI Bot](https://github.com/mekb-turtle/discord-ai-bot)
- [Ollama Telegram Bot](https://github.com/ruecat/ollama-telegram)
- [Hass Ollama Conversation](https://github.com/ej52/hass-ollama-conversation)
- [Rivet plugin](https://github.com/abrenneke/rivet-plugin-ollama)
- [Obsidian BMO Chatbot plugin](https://github.com/longy2k/obsidian-bmo-chatbot)
- [Cliobot](https://github.com/herval/cliobot) (Telegram bot with Ollama support)
- [Copilot for Obsidian plugin](https://github.com/logancyang/obsidian-copilot)
- [Obsidian Local GPT plugin](https://github.com/pfrankov/obsidian-local-gpt)
- [Open Interpreter](https://docs.openinterpreter.com/language-model-setup/local-models/ollama)
- [Llama Coder](https://github.com/ex3ndr/llama-coder) (Copilot alternative using Ollama)
- [Ollama Copilot](https://github.com/bernardo-bruning/ollama-copilot) (Proxy that allows you to use Ollama as a copilot like GitHub Copilot)
- [twinny](https://github.com/rjmacarthy/twinny) (Copilot and Copilot chat alternative using Ollama)
- [Wingman-AI](https://github.com/RussellCanfield/wingman-ai) (Copilot code and chat alternative using Ollama and Hugging Face)
- [Page Assist](https://github.com/n4ze3m/page-assist) (Chrome Extension)
- [Plasmoid Ollama Control](https://github.com/imoize/plasmoid-ollamacontrol) (KDE Plasma extension that allows you to quickly manage/control Ollama model)
- [AI Telegram Bot](https://github.com/tusharhero/aitelegrambot) (Telegram bot using Ollama in backend)
- [AI ST Completion](https://github.com/yaroslavyaroslav/OpenAI-sublime-text) (Sublime Text 4 AI assistant plugin with Ollama support)
- [Discord-Ollama Chat Bot](https://github.com/kevinthedang/discord-ollama) (Generalized TypeScript Discord Bot w/ Tuning Documentation)
- [ChatGPTBox: All in one browser extension](https://github.com/josStorer/chatGPTBox) with [Integrating Tutorial](https://github.com/josStorer/chatGPTBox/issues/616#issuecomment-1975186467)
- [Discord AI chat/moderation bot](https://github.com/rapmd73/Companion) Chat/moderation bot written in python. Uses Ollama to create personalities.
- [Headless Ollama](https://github.com/nischalj10/headless-ollama) (Scripts to automatically install ollama client & models on any OS for apps that depend on ollama server)
- [Terraform AWS Ollama & Open WebUI](https://github.com/xuyangbocn/terraform-aws-self-host-llm) (A Terraform module to deploy on AWS a ready-to-use Ollama service, together with its front-end Open WebUI service.)
- [node-red-contrib-ollama](https://github.com/jakubburkiewicz/node-red-contrib-ollama)
- [Local AI Helper](https://github.com/ivostoykov/localAI) (Chrome and Firefox extensions that enable interactions with the active tab and customisable API endpoints. Includes secure storage for user prompts.)
- [LSP-AI](https://github.com/SilasMarvin/lsp-ai) (Open-source language server for AI-powered functionality)
- [QodeAssist](https://github.com/Palm1r/QodeAssist) (AI-powered coding assistant plugin for Qt Creator)
- [Obsidian Quiz Generator plugin](https://github.com/ECuiDev/obsidian-quiz-generator)
- [AI Summary Helper plugin](https://github.com/philffm/ai-summary-helper)
- [TextCraft](https://github.com/suncloudsmoon/TextCraft) (Copilot in Word alternative using Ollama)
- [Alfred Ollama](https://github.com/zeitlings/alfred-ollama) (Alfred Workflow)
- [TextLLaMA](https://github.com/adarshM84/TextLLaMA) A Chrome Extension that helps you write emails, correct grammar, and translate into any language
- [Simple-Discord-AI](https://github.com/zyphixor/simple-discord-ai)
- [LLM Telegram Bot](https://github.com/innightwolfsleep/llm_telegram_bot) (Telegram bot, primarily for RP. Oobabooga-like buttons, [A1111](https://github.com/AUTOMATIC1111/stable-diffusion-webui) API integration, etc.)
- [mcp-llm](https://github.com/sammcj/mcp-llm) (MCP Server to allow LLMs to call other LLMs)
- [SimpleOllamaUnity](https://github.com/HardCodeDev777/SimpleOllamaUnity) (Unity Engine extension for communicating with Ollama in a few lines of code. Also works at runtime)
- [UnityCodeLama](https://github.com/HardCodeDev777/UnityCodeLama) (Unity Editor tool to analyze scripts via Ollama)
- [NativeMind](https://github.com/NativeMindBrowser/NativeMindExtension) (Private, on-device AI Assistant, no cloud dependencies)
- [GMAI - Gradle Managed AI](https://gmai.premex.se/) (Gradle plugin for automated Ollama lifecycle management during build phases)
- [NOMYO Router](https://github.com/nomyo-ai/nomyo-router) (A transparent Ollama proxy with model deployment aware routing which auto-manages multiple Ollama instances in a given network)
### Supported backends
- [llama.cpp](https://github.com/ggml-org/llama.cpp) project founded by Georgi Gerganov.
### Observability
- [Opik](https://www.comet.com/docs/opik/cookbook/ollama) is an open-source platform to debug, evaluate, and monitor your LLM applications, RAG systems, and agentic workflows with comprehensive tracing, automated evaluations, and production-ready dashboards. Opik supports native integration to Ollama.
- [Lunary](https://lunary.ai/docs/integrations/ollama) is the leading open-source LLM observability platform. It provides a variety of enterprise-grade features such as real-time analytics, prompt templates management, PII masking, and comprehensive agent tracing.
- [OpenLIT](https://github.com/openlit/openlit) is an OpenTelemetry-native tool for monitoring Ollama Applications & GPUs using traces and metrics.
- [HoneyHive](https://docs.honeyhive.ai/integrations/ollama) is an AI observability and evaluation platform for AI agents. Use HoneyHive to evaluate agent performance, interrogate failures, and monitor quality in production.
- [Langfuse](https://langfuse.com/docs/integrations/ollama) is an open source LLM observability platform that enables teams to collaboratively monitor, evaluate and debug AI applications.
- [MLflow Tracing](https://mlflow.org/docs/latest/llms/tracing/index.html#automatic-tracing) is an open source LLM observability tool with a convenient API to log and visualize traces, making it easy to debug and evaluate GenAI applications.
### Security
- [Ollama Fortress](https://github.com/ParisNeo/ollama_proxy_server)


@@ -1,25 +0,0 @@
# Security
The Ollama maintainer team takes security seriously and will actively work to resolve security issues.
## Reporting a vulnerability
If you discover a security vulnerability, please do not open a public issue. Instead, please report it by emailing hello@ollama.com. We ask that you give us sufficient time to investigate and address the vulnerability before disclosing it publicly.
Please include the following details in your report:
- A description of the vulnerability
- Steps to reproduce the issue
- Your assessment of the potential impact
- Any possible mitigations
## Security best practices
While the maintainer team does its best to secure Ollama, users are encouraged to implement their own security best practices, such as:
- Regularly updating to the latest version of Ollama
- Securing access to hosted instances of Ollama
- Monitoring systems for unusual activity
## Contact
For any other questions or concerns related to security, please contact us at hello@ollama.com.


@@ -1,16 +1,3 @@
// Package api implements the client-side API for code wishing to interact
// with the ollama service. The methods of the [Client] type correspond to
// the ollama REST API as described in [the API documentation].
// The ollama command-line client itself uses this package to interact with
// the backend service.
//
// # Examples
//
// Several examples of using this package are available [in the GitHub
// repository].
//
// [the API documentation]: https://github.com/ollama/ollama/blob/main/docs/api.md
// [in the GitHub repository]: https://github.com/ollama/ollama/tree/main/api/examples
package api
import (
@@ -18,157 +5,42 @@ import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"io"
"net/http"
"net/url"
"runtime"
"strconv"
"time"
"github.com/ollama/ollama/auth"
"github.com/ollama/ollama/envconfig"
"github.com/ollama/ollama/format"
"github.com/ollama/ollama/version"
)
// StatusError is returned when the server responds with a non-success
// HTTP status.
type StatusError struct {
	StatusCode   int
	Status       string
	ErrorMessage string `json:"error"`
}

func (e StatusError) Error() string {
	if e.ErrorMessage != "" {
		return fmt.Sprintf("%s: %s", e.Status, e.ErrorMessage)
	}
	return e.Status
}
// Client encapsulates client state for interacting with the ollama
// service. Use [ClientFromEnvironment] to create new Clients.
type Client struct {
	base *url.URL
	http *http.Client
}
func checkError(resp *http.Response, body []byte) error {
	if resp.StatusCode < http.StatusBadRequest {
		return nil
	}

	if resp.StatusCode == http.StatusUnauthorized {
		authError := AuthorizationError{StatusCode: resp.StatusCode}
		json.Unmarshal(body, &authError)
		return authError
	}

	apiError := StatusError{StatusCode: resp.StatusCode}
	err := json.Unmarshal(body, &apiError)
	if err != nil {
		// Use the full body as the message if we fail to decode a response.
		apiError.ErrorMessage = string(body)
	}

	return apiError
}
// ClientFromEnvironment creates a new [Client] using configuration from the
// environment variable OLLAMA_HOST, which points to the network host and
// port on which the ollama service is listening. The format of this variable
// is:
//
// <scheme>://<host>:<port>
//
// If the variable is not specified, a default ollama host and port will be
// used.
func ClientFromEnvironment() (*Client, error) {
return &Client{
base: envconfig.Host(),
http: http.DefaultClient,
}, nil
}
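// NewClient constructs a [Client] against an explicit base URL and
// [http.Client], bypassing OLLAMA_HOST. A sketch of direct construction:
//
//	base, _ := url.Parse("http://127.0.0.1:11434")
//	client := NewClient(base, http.DefaultClient)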
func NewClient(base *url.URL, http *http.Client) *Client {
	return &Client{
		base: base,
		http: http,
	}
}
func getAuthorizationToken(ctx context.Context, challenge string) (string, error) {
token, err := auth.Sign(ctx, []byte(challenge))
if err != nil {
return "", err
}
return token, nil
}
func (c *Client) do(ctx context.Context, method, path string, reqData, respData any) error {
var reqBody io.Reader
var data []byte
var err error
switch reqData := reqData.(type) {
case io.Reader:
// reqData is already an io.Reader
reqBody = reqData
case nil:
// noop
default:
data, err = json.Marshal(reqData)
if err != nil {
return err
}
reqBody = bytes.NewReader(data)
}
requestURL := c.base.JoinPath(path)
var token string
if envconfig.UseAuth() || c.base.Hostname() == "ollama.com" {
now := strconv.FormatInt(time.Now().Unix(), 10)
chal := fmt.Sprintf("%s,%s?ts=%s", method, path, now)
token, err = getAuthorizationToken(ctx, chal)
if err != nil {
return err
}
q := requestURL.Query()
q.Set("ts", now)
requestURL.RawQuery = q.Encode()
}
request, err := http.NewRequestWithContext(ctx, method, requestURL.String(), reqBody)
if err != nil {
return err
}
request.Header.Set("Content-Type", "application/json")
request.Header.Set("Accept", "application/json")
request.Header.Set("User-Agent", fmt.Sprintf("ollama/%s (%s %s) Go/%s", version.Version, runtime.GOARCH, runtime.GOOS, runtime.Version()))
if token != "" {
request.Header.Set("Authorization", token)
}
respObj, err := c.http.Do(request)
if err != nil {
return err
}
defer respObj.Body.Close()
respBody, err := io.ReadAll(respObj.Body)
if err != nil {
return err
}
if err := checkError(respObj, respBody); err != nil {
return err
}
if len(respBody) > 0 && respData != nil {
if err := json.Unmarshal(respBody, respData); err != nil {
return err
}
}
return nil
}
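// maxBufferSize bounds a single streamed NDJSON line; stream passes it to
// bufio.Scanner below, which otherwise caps tokens at 64 KiB.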
const maxBufferSize = 512 * format.KiloByte
func (c *Client) stream(ctx context.Context, method, path string, data any, fn func([]byte) error) error {
	var buf io.Reader
if data != nil {
bts, err := json.Marshal(data)
if err != nil {
@@ -178,80 +50,37 @@ func (c *Client) stream(ctx context.Context, method, path string, data any, fn f
buf = bytes.NewBuffer(bts)
}
requestURL := c.base.JoinPath(path)
var token string
if envconfig.UseAuth() || c.base.Hostname() == "ollama.com" {
var err error
now := strconv.FormatInt(time.Now().Unix(), 10)
chal := fmt.Sprintf("%s,%s?ts=%s", method, path, now)
token, err = getAuthorizationToken(ctx, chal)
if err != nil {
return err
}
q := requestURL.Query()
q.Set("ts", now)
requestURL.RawQuery = q.Encode()
}
	request, err := http.NewRequestWithContext(ctx, method, requestURL.String(), buf)
if err != nil {
return err
}
request.Header.Set("Content-Type", "application/json")
request.Header.Set("Accept", "application/x-ndjson")
request.Header.Set("User-Agent", fmt.Sprintf("ollama/%s (%s %s) Go/%s", version.Version, runtime.GOARCH, runtime.GOOS, runtime.Version()))
request.Header.Set("Accept", "application/json")
if token != "" {
request.Header.Set("Authorization", token)
}
	response, err := c.http.Do(request)
if err != nil {
return err
}
defer response.Body.Close()
scanner := bufio.NewScanner(response.Body)
// increase the buffer size to avoid running out of space
scanBuf := make([]byte, 0, maxBufferSize)
scanner.Buffer(scanBuf, maxBufferSize)
for scanner.Scan() {
		var errorResponse struct {
			Error     string `json:"error,omitempty"`
			SigninURL string `json:"signin_url,omitempty"`
		}

		bts := scanner.Bytes()
		if err := json.Unmarshal(bts, &errorResponse); err != nil {
			if response.StatusCode >= http.StatusBadRequest {
				return StatusError{
					StatusCode:   response.StatusCode,
					Status:       response.Status,
					ErrorMessage: string(bts),
				}
			}
			return errors.New(string(bts))
		}

		if response.StatusCode == http.StatusUnauthorized {
			return AuthorizationError{
				StatusCode: response.StatusCode,
				Status:     response.Status,
				SigninURL:  errorResponse.SigninURL,
			}
		} else if response.StatusCode >= http.StatusBadRequest {
			return StatusError{
				StatusCode:   response.StatusCode,
				Status:       response.Status,
				ErrorMessage: errorResponse.Error,
			}
		}

		if errorResponse.Error != "" {
			return errors.New(errorResponse.Error)
		}
if err := fn(bts); err != nil {
@@ -262,14 +91,8 @@ func (c *Client) stream(ctx context.Context, method, path string, data any, fn f
return nil
}
// GenerateResponseFunc is a function that [Client.Generate] invokes every time
// a response is received from the service. If this function returns an error,
// [Client.Generate] will stop generating and return this error.
type GenerateResponseFunc func(GenerateResponse) error
// Generate generates a response for a given prompt. The req parameter should
// be populated with prompt details. fn is called for each response (there may
// be multiple responses, e.g. in case streaming is enabled).
func (c *Client) Generate(ctx context.Context, req *GenerateRequest, fn GenerateResponseFunc) error {
return c.stream(ctx, http.MethodPost, "/api/generate", req, func(bts []byte) error {
var resp GenerateResponse
@@ -281,37 +104,11 @@ func (c *Client) Generate(ctx context.Context, req *GenerateRequest, fn Generate
})
}
// ChatResponseFunc is a function that [Client.Chat] invokes every time
// a response is received from the service. If this function returns an error,
// [Client.Chat] will stop generating and return this error.
type ChatResponseFunc func(ChatResponse) error
// Chat generates the next message in a chat. [ChatRequest] may contain a
// sequence of messages which can be used to maintain chat history with a model.
// fn is called for each response (there may be multiple responses, e.g. if case
// streaming is enabled).
func (c *Client) Chat(ctx context.Context, req *ChatRequest, fn ChatResponseFunc) error {
return c.stream(ctx, http.MethodPost, "/api/chat", req, func(bts []byte) error {
var resp ChatResponse
if err := json.Unmarshal(bts, &resp); err != nil {
return err
}
return fn(resp)
})
}
// PullProgressFunc is a function that [Client.Pull] invokes every time there
// is progress with a "pull" request sent to the service. If this function
// returns an error, [Client.Pull] will stop the process and return this error.
type PullProgressFunc func(ProgressResponse) error
// Pull downloads a model from the ollama library. fn is called each time
// progress is made on the request and can be used to display a progress bar,
// etc.
func (c *Client) Pull(ctx context.Context, req *PullRequest, fn PullProgressFunc) error {
return c.stream(ctx, http.MethodPost, "/api/pull", req, func(bts []byte) error {
var resp ProgressResponse
if err := json.Unmarshal(bts, &resp); err != nil {
return err
}
@@ -320,17 +117,11 @@ func (c *Client) Pull(ctx context.Context, req *PullRequest, fn PullProgressFunc
})
}
// PushProgressFunc is a function that [Client.Push] invokes when progress is
// made.
// It's similar to other progress function types like [PullProgressFunc].
type PushProgressFunc func(ProgressResponse) error
// Push uploads a model to the model library; requires registering for ollama.ai
// and adding a public key first. fn is called each time progress is made on
// the request and can be used to display a progress bar, etc.
func (c *Client) Push(ctx context.Context, req *PushRequest, fn PushProgressFunc) error {
return c.stream(ctx, http.MethodPost, "/api/push", req, func(bts []byte) error {
var resp ProgressResponse
if err := json.Unmarshal(bts, &resp); err != nil {
return err
}
@@ -339,18 +130,11 @@ func (c *Client) Push(ctx context.Context, req *PushRequest, fn PushProgressFunc
})
}
// CreateProgressFunc is a function that [Client.Create] invokes when progress
// is made.
// It's similar to other progress function types like [PullProgressFunc].
type CreateProgressFunc func(ProgressResponse) error
// Create creates a model from a [Modelfile]. fn is a progress function that
// behaves similarly to other methods (see [Client.Pull]).
//
// [Modelfile]: https://github.com/ollama/ollama/blob/main/docs/modelfile.md
func (c *Client) Create(ctx context.Context, req *CreateRequest, fn CreateProgressFunc) error {
return c.stream(ctx, http.MethodPost, "/api/create", req, func(bts []byte) error {
var resp ProgressResponse
if err := json.Unmarshal(bts, &resp); err != nil {
return err
}
@@ -358,111 +142,3 @@ func (c *Client) Create(ctx context.Context, req *CreateRequest, fn CreateProgre
return fn(resp)
})
}
// List lists models that are available locally.
func (c *Client) List(ctx context.Context) (*ListResponse, error) {
var lr ListResponse
if err := c.do(ctx, http.MethodGet, "/api/tags", nil, &lr); err != nil {
return nil, err
}
return &lr, nil
}
// ListRunning lists running models.
func (c *Client) ListRunning(ctx context.Context) (*ProcessResponse, error) {
var lr ProcessResponse
if err := c.do(ctx, http.MethodGet, "/api/ps", nil, &lr); err != nil {
return nil, err
}
return &lr, nil
}
// Copy copies a model - creating a model with another name from an existing
// model.
func (c *Client) Copy(ctx context.Context, req *CopyRequest) error {
if err := c.do(ctx, http.MethodPost, "/api/copy", req, nil); err != nil {
return err
}
return nil
}
// Delete deletes a model and its data.
func (c *Client) Delete(ctx context.Context, req *DeleteRequest) error {
if err := c.do(ctx, http.MethodDelete, "/api/delete", req, nil); err != nil {
return err
}
return nil
}
// Show obtains model information, including details, modelfile, license etc.
func (c *Client) Show(ctx context.Context, req *ShowRequest) (*ShowResponse, error) {
var resp ShowResponse
if err := c.do(ctx, http.MethodPost, "/api/show", req, &resp); err != nil {
return nil, err
}
return &resp, nil
}
// Heartbeat checks if the server has started and is responsive; if yes, it
// returns nil, otherwise an error.
func (c *Client) Heartbeat(ctx context.Context) error {
if err := c.do(ctx, http.MethodHead, "/", nil, nil); err != nil {
return err
}
return nil
}
// Embed generates embeddings from a model.
func (c *Client) Embed(ctx context.Context, req *EmbedRequest) (*EmbedResponse, error) {
var resp EmbedResponse
if err := c.do(ctx, http.MethodPost, "/api/embed", req, &resp); err != nil {
return nil, err
}
return &resp, nil
}
// Embeddings generates an embedding from a model.
func (c *Client) Embeddings(ctx context.Context, req *EmbeddingRequest) (*EmbeddingResponse, error) {
var resp EmbeddingResponse
if err := c.do(ctx, http.MethodPost, "/api/embeddings", req, &resp); err != nil {
return nil, err
}
return &resp, nil
}
// CreateBlob creates a blob from a file on the server. digest is the
// expected SHA256 digest of the file, and r represents the file.
func (c *Client) CreateBlob(ctx context.Context, digest string, r io.Reader) error {
return c.do(ctx, http.MethodPost, fmt.Sprintf("/api/blobs/%s", digest), r, nil)
}
// Version returns the Ollama server version as a string.
func (c *Client) Version(ctx context.Context) (string, error) {
var version struct {
Version string `json:"version"`
}
if err := c.do(ctx, http.MethodGet, "/api/version", nil, &version); err != nil {
return "", err
}
return version.Version, nil
}
// Signout will signout a client for a local ollama server.
func (c *Client) Signout(ctx context.Context) error {
return c.do(ctx, http.MethodPost, "/api/signout", nil, nil)
}
// Disconnect will disconnect an ollama instance from ollama.com.
func (c *Client) Disconnect(ctx context.Context, encodedKey string) error {
return c.do(ctx, http.MethodDelete, fmt.Sprintf("/api/user/keys/%s", encodedKey), nil, nil)
}
func (c *Client) Whoami(ctx context.Context) (*UserResponse, error) {
var resp UserResponse
if err := c.do(ctx, http.MethodPost, "/api/me", nil, &resp); err != nil {
return nil, err
}
return &resp, nil
}
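Taken together, the client surface above supports a simple health-check-then-inventory flow. A minimal sketch, assuming entries in `ListResponse.Models` expose a `Name` field as described in docs/api.md:

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/ollama/ollama/api"
)

func main() {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		log.Fatal(err)
	}
	ctx := context.Background()

	// Heartbeat returns nil only when the server is up and responsive.
	if err := client.Heartbeat(ctx); err != nil {
		log.Fatalf("ollama server not reachable: %v", err)
	}

	// List mirrors `ollama list`: every model available locally.
	models, err := client.List(ctx)
	if err != nil {
		log.Fatal(err)
	}
	for _, m := range models.Models {
		fmt.Println(m.Name)
	}
}
```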


@@ -1,322 +0,0 @@
package api
import (
"encoding/json"
"fmt"
"net/http"
"net/http/httptest"
"net/url"
"strings"
"testing"
)
func TestClientFromEnvironment(t *testing.T) {
type testCase struct {
value string
expect string
err error
}
testCases := map[string]*testCase{
"empty": {value: "", expect: "http://127.0.0.1:11434"},
"only address": {value: "1.2.3.4", expect: "http://1.2.3.4:11434"},
"only port": {value: ":1234", expect: "http://:1234"},
"address and port": {value: "1.2.3.4:1234", expect: "http://1.2.3.4:1234"},
"scheme http and address": {value: "http://1.2.3.4", expect: "http://1.2.3.4:80"},
"scheme https and address": {value: "https://1.2.3.4", expect: "https://1.2.3.4:443"},
"scheme, address, and port": {value: "https://1.2.3.4:1234", expect: "https://1.2.3.4:1234"},
"hostname": {value: "example.com", expect: "http://example.com:11434"},
"hostname and port": {value: "example.com:1234", expect: "http://example.com:1234"},
"scheme http and hostname": {value: "http://example.com", expect: "http://example.com:80"},
"scheme https and hostname": {value: "https://example.com", expect: "https://example.com:443"},
"scheme, hostname, and port": {value: "https://example.com:1234", expect: "https://example.com:1234"},
"trailing slash": {value: "example.com/", expect: "http://example.com:11434"},
"trailing slash port": {value: "example.com:1234/", expect: "http://example.com:1234"},
}
for k, v := range testCases {
t.Run(k, func(t *testing.T) {
t.Setenv("OLLAMA_HOST", v.value)
client, err := ClientFromEnvironment()
if err != v.err {
t.Fatalf("expected %s, got %s", v.err, err)
}
if client.base.String() != v.expect {
t.Fatalf("expected %s, got %s", v.expect, client.base.String())
}
})
}
}
// testError represents an internal error type with status code and message
// this is used since the error response from the server is not a standard error struct
type testError struct {
message string
statusCode int
raw bool // if true, write message as-is instead of JSON encoding
}
func (e testError) Error() string {
return e.message
}
func TestClientStream(t *testing.T) {
testCases := []struct {
name string
responses []any
wantErr string
}{
{
name: "immediate error response",
responses: []any{
testError{
message: "test error message",
statusCode: http.StatusBadRequest,
},
},
wantErr: "test error message",
},
{
name: "error after successful chunks, ok response",
responses: []any{
ChatResponse{Message: Message{Content: "partial response 1"}},
ChatResponse{Message: Message{Content: "partial response 2"}},
testError{
message: "mid-stream error",
statusCode: http.StatusOK,
},
},
wantErr: "mid-stream error",
},
{
name: "http status error takes precedence over general error",
responses: []any{
testError{
message: "custom error message",
statusCode: http.StatusInternalServerError,
},
},
wantErr: "500",
},
{
name: "successful stream completion",
responses: []any{
ChatResponse{Message: Message{Content: "chunk 1"}},
ChatResponse{Message: Message{Content: "chunk 2"}},
ChatResponse{
Message: Message{Content: "final chunk"},
Done: true,
DoneReason: "stop",
},
},
},
{
name: "plain text error response",
responses: []any{
"internal server error",
},
wantErr: "internal server error",
},
{
name: "HTML error page",
responses: []any{
"<html><body>404 Not Found</body></html>",
},
wantErr: "404 Not Found",
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
flusher, ok := w.(http.Flusher)
if !ok {
t.Fatal("expected http.Flusher")
}
w.Header().Set("Content-Type", "application/x-ndjson")
for _, resp := range tc.responses {
if errResp, ok := resp.(testError); ok {
w.WriteHeader(errResp.statusCode)
err := json.NewEncoder(w).Encode(map[string]string{
"error": errResp.message,
})
if err != nil {
t.Fatal("failed to encode error response:", err)
}
return
}
if str, ok := resp.(string); ok {
fmt.Fprintln(w, str)
flusher.Flush()
continue
}
if err := json.NewEncoder(w).Encode(resp); err != nil {
t.Fatalf("failed to encode response: %v", err)
}
flusher.Flush()
}
}))
defer ts.Close()
client := NewClient(&url.URL{Scheme: "http", Host: ts.Listener.Addr().String()}, http.DefaultClient)
var receivedChunks []ChatResponse
err := client.stream(t.Context(), http.MethodPost, "/v1/chat", nil, func(chunk []byte) error {
var resp ChatResponse
if err := json.Unmarshal(chunk, &resp); err != nil {
return fmt.Errorf("failed to unmarshal chunk: %w", err)
}
receivedChunks = append(receivedChunks, resp)
return nil
})
if tc.wantErr != "" {
if err == nil {
t.Fatal("expected error but got nil")
}
if !strings.Contains(err.Error(), tc.wantErr) {
t.Errorf("expected error containing %q, got %v", tc.wantErr, err)
}
return
}
if err != nil {
t.Errorf("unexpected error: %v", err)
}
})
}
}
func TestClientDo(t *testing.T) {
testCases := []struct {
name string
response any
wantErr string
wantStatusCode int
}{
{
name: "immediate error response",
response: testError{
message: "test error message",
statusCode: http.StatusBadRequest,
},
wantErr: "test error message",
wantStatusCode: http.StatusBadRequest,
},
{
name: "server error response",
response: testError{
message: "internal error",
statusCode: http.StatusInternalServerError,
},
wantErr: "internal error",
wantStatusCode: http.StatusInternalServerError,
},
{
name: "successful response",
response: struct {
ID string `json:"id"`
Success bool `json:"success"`
}{
ID: "msg_123",
Success: true,
},
},
{
name: "plain text error response",
response: testError{
message: "internal server error",
statusCode: http.StatusInternalServerError,
raw: true,
},
wantErr: "internal server error",
wantStatusCode: http.StatusInternalServerError,
},
{
name: "HTML error page",
response: testError{
message: "<html><body>404 Not Found</body></html>",
statusCode: http.StatusNotFound,
raw: true,
},
wantErr: "<html><body>404 Not Found</body></html>",
wantStatusCode: http.StatusNotFound,
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if errResp, ok := tc.response.(testError); ok {
w.WriteHeader(errResp.statusCode)
if !errResp.raw {
err := json.NewEncoder(w).Encode(map[string]string{
"error": errResp.message,
})
if err != nil {
t.Fatal("failed to encode error response:", err)
}
} else {
// Write raw message (simulates non-JSON error responses)
fmt.Fprint(w, errResp.message)
}
return
}
w.Header().Set("Content-Type", "application/json")
if err := json.NewEncoder(w).Encode(tc.response); err != nil {
t.Fatalf("failed to encode response: %v", err)
}
}))
defer ts.Close()
client := NewClient(&url.URL{Scheme: "http", Host: ts.Listener.Addr().String()}, http.DefaultClient)
var resp struct {
ID string `json:"id"`
Success bool `json:"success"`
}
err := client.do(t.Context(), http.MethodPost, "/v1/messages", nil, &resp)
if tc.wantErr != "" {
if err == nil {
t.Fatalf("got nil, want error %q", tc.wantErr)
}
if err.Error() != tc.wantErr {
t.Errorf("error message mismatch: got %q, want %q", err.Error(), tc.wantErr)
}
if tc.wantStatusCode != 0 {
if statusErr, ok := err.(StatusError); ok {
if statusErr.StatusCode != tc.wantStatusCode {
t.Errorf("status code mismatch: got %d, want %d", statusErr.StatusCode, tc.wantStatusCode)
}
} else {
t.Errorf("expected StatusError, got %T", err)
}
}
return
}
if err != nil {
t.Fatalf("got error %q, want nil", err)
}
if expectedResp, ok := tc.response.(struct {
ID string `json:"id"`
Success bool `json:"success"`
}); ok {
if resp.ID != expectedResp.ID {
t.Errorf("response ID mismatch: got %q, want %q", resp.ID, expectedResp.ID)
}
if resp.Success != expectedResp.Success {
t.Errorf("response Success mismatch: got %v, want %v", resp.Success, expectedResp.Success)
}
}
})
}
}

View File

@@ -1,18 +0,0 @@
# Ollama API Examples
Run the examples in this directory with:
```shell
go run example_name/main.go
```
## Chat - Chat with a model
- [chat/main.go](chat/main.go)
## Generate - Generate text from a model
- [generate/main.go](generate/main.go)
- [generate-streaming/main.go](generate-streaming/main.go)
## Pull - Pull a model
- [pull-progress/main.go](pull-progress/main.go)
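These examples create their client with `api.ClientFromEnvironment`, so they honor the `OLLAMA_HOST` environment variable. For instance, to run the chat example against a remote server (the address below is a placeholder):
```shell
OLLAMA_HOST=http://192.168.1.10:11434 go run chat/main.go
```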

View File

@@ -1,51 +0,0 @@
package main
import (
"context"
"fmt"
"log"
"github.com/ollama/ollama/api"
)
func main() {
client, err := api.ClientFromEnvironment()
if err != nil {
log.Fatal(err)
}
messages := []api.Message{
{
Role: "system",
Content: "Provide very brief, concise responses",
},
{
Role: "user",
Content: "Name some unusual animals",
},
{
Role: "assistant",
Content: "Monotreme, platypus, echidna",
},
{
Role: "user",
Content: "which of these is the most dangerous?",
},
}
ctx := context.Background()
req := &api.ChatRequest{
Model: "llama3.2",
Messages: messages,
}
respFunc := func(resp api.ChatResponse) error {
fmt.Print(resp.Message.Content)
return nil
}
err = client.Chat(ctx, req, respFunc)
if err != nil {
log.Fatal(err)
}
}

View File

@@ -1,40 +0,0 @@
package main
import (
"context"
"fmt"
"log"
"github.com/ollama/ollama/api"
)
func main() {
client, err := api.ClientFromEnvironment()
if err != nil {
log.Fatal(err)
}
// By default, GenerateRequest is streaming.
req := &api.GenerateRequest{
Model: "gemma2",
Prompt: "how many planets are there?",
}
ctx := context.Background()
respFunc := func(resp api.GenerateResponse) error {
// Only print the response here; GenerateResponse has a number of other
// interesting fields you may want to examine.
// In streaming mode, responses are partial so we call fmt.Print (and not
// Println) in order to avoid spurious newlines being introduced. The
// model will insert its own newlines if it wants.
fmt.Print(resp.Response)
return nil
}
err = client.Generate(ctx, req, respFunc)
if err != nil {
log.Fatal(err)
}
fmt.Println()
}

View File

@@ -1,37 +0,0 @@
package main
import (
"context"
"fmt"
"log"
"github.com/ollama/ollama/api"
)
func main() {
client, err := api.ClientFromEnvironment()
if err != nil {
log.Fatal(err)
}
req := &api.GenerateRequest{
Model: "gemma2",
Prompt: "how many planets are there?",
// disable streaming: new(bool) returns a pointer to a false-valued bool
Stream: new(bool),
}
ctx := context.Background()
respFunc := func(resp api.GenerateResponse) error {
// Only print the response here; GenerateResponse has a number of other
// interesting fields you may want to examine.
fmt.Println(resp.Response)
return nil
}
err = client.Generate(ctx, req, respFunc)
if err != nil {
log.Fatal(err)
}
}

View File

@@ -1,47 +0,0 @@
package main
import (
"context"
"fmt"
"log"
"os"
"github.com/ollama/ollama/api"
)
func main() {
if len(os.Args) <= 1 {
log.Fatal("usage: <image name>")
}
imgData, err := os.ReadFile(os.Args[1])
if err != nil {
log.Fatal(err)
}
client, err := api.ClientFromEnvironment()
if err != nil {
log.Fatal(err)
}
req := &api.GenerateRequest{
Model: "llava",
Prompt: "describe this image",
Images: []api.ImageData{imgData},
}
ctx := context.Background()
respFunc := func(resp api.GenerateResponse) error {
// In streaming mode, responses are partial so we call fmt.Print (and not
// Println) in order to avoid spurious newlines being introduced. The
// model will insert its own newlines if it wants.
fmt.Print(resp.Response)
return nil
}
err = client.Generate(ctx, req, respFunc)
if err != nil {
log.Fatal(err)
}
fmt.Println()
}

View File

@@ -1,31 +0,0 @@
package main
import (
"context"
"fmt"
"log"
"github.com/ollama/ollama/api"
)
func main() {
client, err := api.ClientFromEnvironment()
if err != nil {
log.Fatal(err)
}
ctx := context.Background()
req := &api.PullRequest{
Model: "mistral",
}
progressFunc := func(resp api.ProgressResponse) error {
fmt.Printf("Progress: status=%v, total=%v, completed=%v\n", resp.Status, resp.Total, resp.Completed)
return nil
}
err = client.Pull(ctx, req, progressFunc)
if err != nil {
log.Fatal(err)
}
}

View File

File diff suppressed because it is too large

View File

@@ -1,552 +0,0 @@
package api
import (
"encoding/json"
"errors"
"math"
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestKeepAliveParsingFromJSON(t *testing.T) {
tests := []struct {
name string
req string
exp *Duration
}{
{
name: "Unset",
req: `{ }`,
exp: nil,
},
{
name: "Positive Integer",
req: `{ "keep_alive": 42 }`,
exp: &Duration{42 * time.Second},
},
{
name: "Positive Float",
req: `{ "keep_alive": 42.5 }`,
exp: &Duration{42500 * time.Millisecond},
},
{
name: "Positive Integer String",
req: `{ "keep_alive": "42m" }`,
exp: &Duration{42 * time.Minute},
},
{
name: "Negative Integer",
req: `{ "keep_alive": -1 }`,
exp: &Duration{math.MaxInt64},
},
{
name: "Negative Float",
req: `{ "keep_alive": -3.14 }`,
exp: &Duration{math.MaxInt64},
},
{
name: "Negative Integer String",
req: `{ "keep_alive": "-1m" }`,
exp: &Duration{math.MaxInt64},
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
var dec ChatRequest
err := json.Unmarshal([]byte(test.req), &dec)
require.NoError(t, err)
assert.Equal(t, test.exp, dec.KeepAlive)
})
}
}
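// The cases above pin down the parsing rules: bare numbers are seconds
// (floats keep sub-second precision) and any negative value means "keep
// loaded indefinitely" (math.MaxInt64). A minimal parser consistent with
// them might look like this; it is a sketch, not the package's actual
// implementation, and "fmt" would need importing here.
func parseKeepAlive(b []byte) (time.Duration, error) {
	var v any
	if err := json.Unmarshal(b, &v); err != nil {
		return 0, err
	}
	switch t := v.(type) {
	case float64: // JSON numbers, e.g. 42 or 42.5
		if t < 0 {
			return time.Duration(math.MaxInt64), nil
		}
		return time.Duration(t * float64(time.Second)), nil
	case string: // Go-style durations, e.g. "42m"
		d, err := time.ParseDuration(t)
		if err != nil {
			return 0, err
		}
		if d < 0 {
			return time.Duration(math.MaxInt64), nil
		}
		return d, nil
	default:
		return 0, fmt.Errorf("unsupported keep_alive type %T", v)
	}
}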
func TestDurationMarshalUnmarshal(t *testing.T) {
tests := []struct {
name string
input time.Duration
expected time.Duration
}{
{
"negative duration",
time.Duration(-1),
time.Duration(math.MaxInt64),
},
{
"positive duration",
42 * time.Second,
42 * time.Second,
},
{
"another positive duration",
42 * time.Minute,
42 * time.Minute,
},
{
"zero duration",
time.Duration(0),
time.Duration(0),
},
{
"max duration",
time.Duration(math.MaxInt64),
time.Duration(math.MaxInt64),
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
b, err := json.Marshal(Duration{test.input})
require.NoError(t, err)
var d Duration
err = json.Unmarshal(b, &d)
require.NoError(t, err)
assert.Equal(t, test.expected, d.Duration, "input %v, marshalled %v, got %v", test.input, string(b), d.Duration)
})
}
}
func TestUseMmapParsingFromJSON(t *testing.T) {
tr := true
fa := false
tests := []struct {
name string
req string
exp *bool
}{
{
name: "Undefined",
req: `{ }`,
exp: nil,
},
{
name: "True",
req: `{ "use_mmap": true }`,
exp: &tr,
},
{
name: "False",
req: `{ "use_mmap": false }`,
exp: &fa,
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
var oMap map[string]any
err := json.Unmarshal([]byte(test.req), &oMap)
require.NoError(t, err)
opts := DefaultOptions()
err = opts.FromMap(oMap)
require.NoError(t, err)
assert.Equal(t, test.exp, opts.UseMMap)
})
}
}
func TestUseMmapFormatParams(t *testing.T) {
tr := true
fa := false
tests := []struct {
name string
req map[string][]string
exp *bool
err error
}{
{
name: "True",
req: map[string][]string{
"use_mmap": {"true"},
},
exp: &tr,
err: nil,
},
{
name: "False",
req: map[string][]string{
"use_mmap": {"false"},
},
exp: &fa,
err: nil,
},
{
name: "Numeric True",
req: map[string][]string{
"use_mmap": {"1"},
},
exp: &tr,
err: nil,
},
{
name: "Numeric False",
req: map[string][]string{
"use_mmap": {"0"},
},
exp: &fa,
err: nil,
},
{
name: "invalid string",
req: map[string][]string{
"use_mmap": {"foo"},
},
exp: nil,
err: errors.New("invalid bool value [foo]"),
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
resp, err := FormatParams(test.req)
require.Equal(t, test.err, err)
respVal, ok := resp["use_mmap"]
if test.exp != nil {
assert.True(t, ok, "resp: %v", resp)
assert.Equal(t, *test.exp, *respVal.(*bool))
}
})
}
}
func TestMessage_UnmarshalJSON(t *testing.T) {
tests := []struct {
input string
expected string
}{
{`{"role": "USER", "content": "Hello!"}`, "user"},
{`{"role": "System", "content": "Initialization complete."}`, "system"},
{`{"role": "assistant", "content": "How can I help you?"}`, "assistant"},
{`{"role": "TOOl", "content": "Access granted."}`, "tool"},
}
for _, test := range tests {
var msg Message
if err := json.Unmarshal([]byte(test.input), &msg); err != nil {
t.Errorf("Unexpected error: %v", err)
}
if msg.Role != test.expected {
t.Errorf("role not lowercased: got %v, expected %v", msg.Role, test.expected)
}
}
}
func TestToolFunction_UnmarshalJSON(t *testing.T) {
tests := []struct {
name string
input string
wantErr string
}{
{
name: "valid enum with same types",
input: `{
"name": "test",
"description": "test function",
"parameters": {
"type": "object",
"required": ["test"],
"properties": {
"test": {
"type": "string",
"description": "test prop",
"enum": ["a", "b", "c"]
}
}
}
}`,
wantErr: "",
},
{
name: "empty enum array",
input: `{
"name": "test",
"description": "test function",
"parameters": {
"type": "object",
"required": ["test"],
"properties": {
"test": {
"type": "string",
"description": "test prop",
"enum": []
}
}
}
}`,
wantErr: "",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
var tf ToolFunction
err := json.Unmarshal([]byte(tt.input), &tf)
if tt.wantErr != "" {
require.Error(t, err)
assert.Contains(t, err.Error(), tt.wantErr)
} else {
require.NoError(t, err)
}
})
}
}
func TestToolFunctionParameters_MarshalJSON(t *testing.T) {
tests := []struct {
name string
input ToolFunctionParameters
expected string
}{
{
name: "simple object with string property",
input: ToolFunctionParameters{
Type: "object",
Required: []string{"name"},
Properties: map[string]ToolProperty{
"name": {Type: PropertyType{"string"}},
},
},
expected: `{"type":"object","required":["name"],"properties":{"name":{"type":"string"}}}`,
},
{
name: "no required",
input: ToolFunctionParameters{
Type: "object",
Properties: map[string]ToolProperty{
"name": {Type: PropertyType{"string"}},
},
},
expected: `{"type":"object","properties":{"name":{"type":"string"}}}`,
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
data, err := json.Marshal(test.input)
require.NoError(t, err)
assert.Equal(t, test.expected, string(data))
})
}
}
func TestToolCallFunction_IndexAlwaysMarshals(t *testing.T) {
fn := ToolCallFunction{
Name: "echo",
Arguments: ToolCallFunctionArguments{"message": "hi"},
}
data, err := json.Marshal(fn)
require.NoError(t, err)
raw := map[string]any{}
require.NoError(t, json.Unmarshal(data, &raw))
require.Contains(t, raw, "index")
assert.Equal(t, float64(0), raw["index"])
fn.Index = 3
data, err = json.Marshal(fn)
require.NoError(t, err)
raw = map[string]any{}
require.NoError(t, json.Unmarshal(data, &raw))
require.Contains(t, raw, "index")
assert.Equal(t, float64(3), raw["index"])
}
func TestPropertyType_UnmarshalJSON(t *testing.T) {
tests := []struct {
name string
input string
expected PropertyType
}{
{
name: "string type",
input: `"string"`,
expected: PropertyType{"string"},
},
{
name: "array of types",
input: `["string", "number"]`,
expected: PropertyType{"string", "number"},
},
{
name: "array with single type",
input: `["string"]`,
expected: PropertyType{"string"},
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
var pt PropertyType
if err := json.Unmarshal([]byte(test.input), &pt); err != nil {
t.Errorf("Unexpected error: %v", err)
}
if len(pt) != len(test.expected) {
t.Errorf("Length mismatch: got %v, expected %v", len(pt), len(test.expected))
}
for i, v := range pt {
if v != test.expected[i] {
t.Errorf("Value mismatch at index %d: got %v, expected %v", i, v, test.expected[i])
}
}
})
}
}
func TestPropertyType_MarshalJSON(t *testing.T) {
tests := []struct {
name string
input PropertyType
expected string
}{
{
name: "single type",
input: PropertyType{"string"},
expected: `"string"`,
},
{
name: "multiple types",
input: PropertyType{"string", "number"},
expected: `["string","number"]`,
},
{
name: "empty type",
input: PropertyType{},
expected: `[]`,
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
data, err := json.Marshal(test.input)
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
if string(data) != test.expected {
t.Errorf("Marshaled data mismatch: got %v, expected %v", string(data), test.expected)
}
})
}
}
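// A marshaller consistent with both PropertyType tests above is tiny: a
// single type serializes as a bare JSON string, anything else (including
// the empty list) as an array. Sketch only, assuming PropertyType's
// underlying type is []string.
func marshalPropertyType(pt PropertyType) ([]byte, error) {
	if len(pt) == 1 {
		return json.Marshal(pt[0])
	}
	return json.Marshal([]string(pt))
}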
func TestThinking_UnmarshalJSON(t *testing.T) {
tests := []struct {
name string
input string
expectedThinking *ThinkValue
expectedError bool
}{
{
name: "true",
input: `{ "think": true }`,
expectedThinking: &ThinkValue{Value: true},
},
{
name: "false",
input: `{ "think": false }`,
expectedThinking: &ThinkValue{Value: false},
},
{
name: "unset",
input: `{ }`,
expectedThinking: nil,
},
{
name: "string_high",
input: `{ "think": "high" }`,
expectedThinking: &ThinkValue{Value: "high"},
},
{
name: "string_medium",
input: `{ "think": "medium" }`,
expectedThinking: &ThinkValue{Value: "medium"},
},
{
name: "string_low",
input: `{ "think": "low" }`,
expectedThinking: &ThinkValue{Value: "low"},
},
{
name: "invalid_string",
input: `{ "think": "invalid" }`,
expectedThinking: nil,
expectedError: true,
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
var req GenerateRequest
err := json.Unmarshal([]byte(test.input), &req)
if test.expectedError {
require.Error(t, err)
} else {
require.NoError(t, err)
if test.expectedThinking == nil {
assert.Nil(t, req.Think)
} else {
require.NotNil(t, req.Think)
assert.Equal(t, test.expectedThinking.Value, req.Think.Value)
}
}
})
}
}
func TestToolFunctionParameters_String(t *testing.T) {
tests := []struct {
name string
params ToolFunctionParameters
expected string
}{
{
name: "simple object with string property",
params: ToolFunctionParameters{
Type: "object",
Required: []string{"name"},
Properties: map[string]ToolProperty{
"name": {
Type: PropertyType{"string"},
Description: "The name of the person",
},
},
},
expected: `{"type":"object","required":["name"],"properties":{"name":{"type":"string","description":"The name of the person"}}}`,
},
{
name: "marshal failure returns empty string",
params: ToolFunctionParameters{
Type: "object",
Defs: func() any {
// Create a cycle that will cause json.Marshal to fail
type selfRef struct {
Self *selfRef
}
s := &selfRef{}
s.Self = s
return s
}(),
Properties: map[string]ToolProperty{},
},
expected: "",
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
result := test.params.String()
assert.Equal(t, test.expected, result)
})
}
}

View File

@@ -1,142 +0,0 @@
package api
import (
"testing"
)
func TestToolParameterToTypeScriptType(t *testing.T) {
tests := []struct {
name string
param ToolProperty
expected string
}{
{
name: "single string type",
param: ToolProperty{
Type: PropertyType{"string"},
},
expected: "string",
},
{
name: "single number type",
param: ToolProperty{
Type: PropertyType{"number"},
},
expected: "number",
},
{
name: "integer maps to number",
param: ToolProperty{
Type: PropertyType{"integer"},
},
expected: "number",
},
{
name: "boolean type",
param: ToolProperty{
Type: PropertyType{"boolean"},
},
expected: "boolean",
},
{
name: "array type",
param: ToolProperty{
Type: PropertyType{"array"},
},
expected: "any[]",
},
{
name: "object type",
param: ToolProperty{
Type: PropertyType{"object"},
},
expected: "Record<string, any>",
},
{
name: "null type",
param: ToolProperty{
Type: PropertyType{"null"},
},
expected: "null",
},
{
name: "multiple types as union",
param: ToolProperty{
Type: PropertyType{"string", "number"},
},
expected: "string | number",
},
{
name: "string or null union",
param: ToolProperty{
Type: PropertyType{"string", "null"},
},
expected: "string | null",
},
{
name: "anyOf with single types",
param: ToolProperty{
AnyOf: []ToolProperty{
{Type: PropertyType{"string"}},
{Type: PropertyType{"number"}},
},
},
expected: "string | number",
},
{
name: "anyOf with multiple types in each branch",
param: ToolProperty{
AnyOf: []ToolProperty{
{Type: PropertyType{"string", "null"}},
{Type: PropertyType{"number"}},
},
},
expected: "string | null | number",
},
{
name: "nested anyOf",
param: ToolProperty{
AnyOf: []ToolProperty{
{Type: PropertyType{"boolean"}},
{
AnyOf: []ToolProperty{
{Type: PropertyType{"string"}},
{Type: PropertyType{"number"}},
},
},
},
},
expected: "boolean | string | number",
},
{
name: "empty type returns any",
param: ToolProperty{
Type: PropertyType{},
},
expected: "any",
},
{
name: "unknown type maps to any",
param: ToolProperty{
Type: PropertyType{"unknown_type"},
},
expected: "any",
},
{
name: "multiple types including array",
param: ToolProperty{
Type: PropertyType{"string", "array", "null"},
},
expected: "string | any[] | null",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result := tt.param.ToTypeScriptType()
if result != tt.expected {
t.Errorf("ToTypeScriptType() = %q, want %q", result, tt.expected)
}
})
}
}
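// The mapping these cases exercise is a small recursion: anyOf branches
// union together, each JSON Schema type maps to a TypeScript type, and
// anything unrecognized (or an empty type list) falls back to "any".
// Sketch only, under those assumptions; "strings" would need importing here.
func toTypeScriptType(p ToolProperty) string {
	if len(p.AnyOf) > 0 {
		parts := make([]string, 0, len(p.AnyOf))
		for _, branch := range p.AnyOf {
			parts = append(parts, toTypeScriptType(branch))
		}
		return strings.Join(parts, " | ")
	}
	if len(p.Type) == 0 {
		return "any"
	}
	tsTypes := map[string]string{
		"string":  "string",
		"number":  "number",
		"integer": "number",
		"boolean": "boolean",
		"array":   "any[]",
		"object":  "Record<string, any>",
		"null":    "null",
	}
	parts := make([]string, 0, len(p.Type))
	for _, t := range p.Type {
		if ts, ok := tsTypes[t]; ok {
			parts = append(parts, ts)
		} else {
			parts = append(parts, "any")
		}
	}
	return strings.Join(parts, " | ")
}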

16
app/.eslintrc.json Normal file
View File

@@ -0,0 +1,16 @@
{
"env": {
"browser": true,
"es6": true,
"node": true
},
"extends": [
"eslint:recommended",
"plugin:@typescript-eslint/eslint-recommended",
"plugin:@typescript-eslint/recommended",
"plugin:import/recommended",
"plugin:import/electron",
"plugin:import/typescript"
],
"parser": "@typescript-eslint/parser"
}

101
app/.gitignore vendored
View File

@@ -1,11 +1,92 @@
ollama.syso
*.crt
*.exe
/app/app
/app/squirrel
ollama
*cover*
.vscode
.env
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
.DS_Store
.claude
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# TypeScript v1 declaration files
typings/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variables file
.env
.env.test
# parcel-bundler cache (https://parceljs.org/)
.cache
# next.js build output
.next
# nuxt.js build output
.nuxt
# vuepress build output
.vuepress/dist
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# Webpack
.webpack/
# Vite
.vite/
# Electron-Forge
out/

View File

@@ -1,97 +1,27 @@
# Ollama for macOS and Windows
# Desktop
## Download
_Note: the Ollama desktop app is a work in progress and is not yet ready for general use._
- [macOS](https://github.com/ollama/app/releases/download/latest/Ollama.dmg)
- [Windows](https://github.com/ollama/app/releases/download/latest/OllamaSetup.exe)
This app builds upon Ollama to provide a desktop experience for running models.
## Development
## Developing
### Desktop App
First, build the `ollama` binary:
```bash
go generate ./... &&
go run ./cmd/app
```
```bash
make -C ..
```
### UI Development
Then run the desktop app with `npm start`:
#### Setup
Install required tools:
```bash
go install github.com/tkrajina/typescriptify-golang-structs/tscriptify@latest
```
#### Develop UI (Development Mode)
1. Start the React development server (with hot-reload):
```bash
cd ui/app
npm install
npm run dev
npm start
```
2. In a separate terminal, run the Ollama app with the `-dev` flag:
```bash
go generate ./... &&
OLLAMA_DEBUG=1 go run ./cmd/app -dev
```
The `-dev` flag enables:
- Loading the UI from the Vite dev server at http://localhost:5173
- Fixed UI server port at http://127.0.0.1:3001 for API requests (see the example below)
- CORS headers for cross-origin requests
- Hot-reload support for UI development
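With the fixed port, the API can be exercised directly while developing; for example, the app's own login check posts to this endpoint (assumes the app is running with `-dev`):
```bash
curl -X POST http://127.0.0.1:3001/api/me
```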
## Build
### Windows
- [Inno Setup](https://jrsoftware.org/isinfo.php)
**Dependencies** - either build a local copy of Ollama, or use a GitHub release:
```powershell
# Local dependencies
.\scripts\deps_local.ps1
# Release dependencies
.\scripts\deps_release.ps1 0.6.8
```
**Build**
```powershell
.\scripts\build_windows.ps1
```
### macOS
CI builds with Xcode 14.1 for compatibility with macOS versions prior to 13. To manually build with macOS 11+ support, download the older Xcode [here](https://developer.apple.com/services-account/download?path=/Developer_Tools/Xcode_14.1/Xcode_14.1.xip), extract it, run `mv ./Xcode.app /Applications/Xcode_14.1.0.app`, then activate it with:
```
export CGO_CFLAGS=-mmacosx-version-min=12.0
export CGO_CXXFLAGS=-mmacosx-version-min=12.0
export CGO_LDFLAGS=-mmacosx-version-min=12.0
export SDKROOT=/Applications/Xcode_14.1.0.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk
export DEVELOPER_DIR=/Applications/Xcode_14.1.0.app/Contents/Developer
```
**Dependencies** - either build a local copy of Ollama, or use a GitHub release:
```sh
# Local dependencies
./scripts/deps_local.sh
# Release dependencies
./scripts/deps_release.sh 0.6.8
```
**Build**
```sh
./scripts/build_darwin.sh
```
## Coming soon
- Browse the latest available models on Hugging Face and other sources
- Keep track of previous conversations with models
- Switch quickly between models
- Connect to remote Ollama servers to run models

View File

Binary file not shown (image removed, 7.3 KiB)

View File

@@ -1,19 +0,0 @@
//go:build windows || darwin
package assets
import (
"embed"
"io/fs"
)
//go:embed *.ico
var icons embed.FS
func ListIcons() ([]string, error) {
return fs.Glob(icons, "*")
}
func GetIcon(filename string) ([]byte, error) {
return icons.ReadFile(filename)
}

View File

Binary file not shown (image removed, 15 KiB)

BIN
app/assets/icon.icns Normal file
View File

Binary file not shown (icon added)

View File

Binary file not shown (image added, 442 B)

View File

Binary file not shown (image added, 889 B)

View File

Binary file not shown (image removed, 76 KiB)

View File

Binary file not shown (image removed, 115 KiB)

View File

Binary file not shown (image removed, 116 KiB)

View File

@@ -1,26 +0,0 @@
//go:build windows || darwin
package auth
import (
"encoding/base64"
"fmt"
"net/url"
"os"
"github.com/ollama/ollama/auth"
)
// BuildConnectURL generates the connect URL with the public key and device name
func BuildConnectURL(baseURL string) (string, error) {
pubKey, err := auth.GetPublicKey()
if err != nil {
return "", fmt.Errorf("failed to get public key: %w", err)
}
encodedKey := base64.RawURLEncoding.EncodeToString([]byte(pubKey))
hostname, _ := os.Hostname()
encodedDevice := url.QueryEscape(hostname)
return fmt.Sprintf("%s/connect?name=%s&key=%s&launch=true", baseURL, encodedDevice, encodedKey), nil
}
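// Usage sketch (illustrative): the app hands the resulting URL to the
// default browser, as handleConnectURLScheme does in cmd/app.
//
//	url, err := auth.BuildConnectURL("https://ollama.com")
//	if err != nil {
//		return err
//	}
//	// url looks like https://ollama.com/connect?name=<host>&key=<pubkey>&launch=true
//	openInBrowser(url)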

View File

@@ -1,7 +0,0 @@
#import <Cocoa/Cocoa.h>
@interface AppDelegate : NSObject <NSApplicationDelegate>
- (void)applicationDidFinishLaunching:(NSNotification *)aNotification;
@end

View File

@@ -1,480 +0,0 @@
//go:build windows || darwin
package main
import (
"context"
"encoding/json"
"errors"
"fmt"
"io"
"log/slog"
"net"
"net/http"
"net/url"
"os"
"os/exec"
"os/signal"
"path/filepath"
"runtime"
"strings"
"syscall"
"time"
"github.com/google/uuid"
"github.com/ollama/ollama/app/auth"
"github.com/ollama/ollama/app/logrotate"
"github.com/ollama/ollama/app/server"
"github.com/ollama/ollama/app/store"
"github.com/ollama/ollama/app/tools"
"github.com/ollama/ollama/app/ui"
"github.com/ollama/ollama/app/updater"
"github.com/ollama/ollama/app/version"
)
var (
wv = &Webview{}
uiServerPort int
)
var debug = strings.EqualFold(os.Getenv("OLLAMA_DEBUG"), "true") || os.Getenv("OLLAMA_DEBUG") == "1"
var (
fastStartup = false
devMode = false
)
type appMove int
const (
CannotMove appMove = iota
UserDeclinedMove
MoveCompleted
AlreadyMoved
LoginSession
PermissionDenied
MoveError
)
func main() {
startHidden := false
var urlSchemeRequest string
if len(os.Args) > 1 {
for _, arg := range os.Args {
// Handle URL scheme requests (Windows)
if strings.HasPrefix(arg, "ollama://") {
urlSchemeRequest = arg
slog.Info("received URL scheme request", "url", arg)
continue
}
switch arg {
case "serve":
fmt.Fprintln(os.Stderr, "serve command not supported, use ollama")
os.Exit(1)
case "version", "-v", "--version":
fmt.Println(version.Version)
os.Exit(0)
case "background":
// When running the process in this "background" mode, we spawn a
// child process for the main app. This is necessary so the
// "Allow in the Background" setting in MacOS can be unchecked
// without breaking the main app. Two copies of the app are
// present in the bundle, one for the main app and one for the
// background initiator.
fmt.Fprintln(os.Stdout, "starting in background")
runInBackground()
os.Exit(0)
case "hidden", "-j", "--hide":
// startHidden suppresses the UI on startup, and can be triggered multiple ways
// On windows, path based via login startup detection
// On MacOS via [NSApp isHidden] from `open -j -a /Applications/Ollama.app` or equivalent
// On both via the "hidden" command line argument
startHidden = true
case "--fast-startup":
// Skip optional steps like pending updates to start quickly for immediate use
fastStartup = true
case "-dev", "--dev":
// Development mode: use local dev server and enable CORS
devMode = true
}
}
}
level := slog.LevelInfo
if debug {
level = slog.LevelDebug
}
logrotate.Rotate(appLogPath)
if _, err := os.Stat(filepath.Dir(appLogPath)); errors.Is(err, os.ErrNotExist) {
if err := os.MkdirAll(filepath.Dir(appLogPath), 0o755); err != nil {
slog.Error(fmt.Sprintf("failed to create server log dir %v", err))
return
}
}
var logFile io.Writer
var err error
logFile, err = os.OpenFile(appLogPath, os.O_APPEND|os.O_WRONLY|os.O_CREATE, 0o755)
if err != nil {
slog.Error(fmt.Sprintf("failed to create server log %v", err))
return
}
// Detect if we're a GUI app on Windows, and if not, send logs to the console as well
if os.Stderr.Fd() != 0 {
// Console app detected
logFile = io.MultiWriter(os.Stderr, logFile)
}
handler := slog.NewTextHandler(logFile, &slog.HandlerOptions{
Level: level,
AddSource: true,
ReplaceAttr: func(_ []string, attr slog.Attr) slog.Attr {
if attr.Key == slog.SourceKey {
source := attr.Value.Any().(*slog.Source)
source.File = filepath.Base(source.File)
}
return attr
},
})
slog.SetDefault(slog.New(handler))
logStartup()
// On Windows, check if another instance is running and send URL to it
// Do this after logging is set up so we can debug issues
if runtime.GOOS == "windows" && urlSchemeRequest != "" {
slog.Debug("checking for existing instance", "url", urlSchemeRequest)
if checkAndHandleExistingInstance(urlSchemeRequest) {
// The function will exit if it successfully sends to another instance
// If we reach here, we're the first/only instance
} else {
// No existing instance found, handle the URL scheme in this instance
go func() {
handleURLSchemeInCurrentInstance(urlSchemeRequest)
}()
}
}
if u := os.Getenv("OLLAMA_UPDATE_URL"); u != "" {
updater.UpdateCheckURLBase = u
}
// Detect if this is a first start after an upgrade, in
// which case we need to do some cleanup
var skipMove bool
if _, err := os.Stat(updater.UpgradeMarkerFile); err == nil {
slog.Debug("first start after upgrade")
err = updater.DoPostUpgradeCleanup()
if err != nil {
slog.Error("failed to cleanup prior version", "error", err)
}
// We never prompt to move the app after an upgrade
skipMove = true
// Start hidden after updates to prevent UI from opening automatically
startHidden = true
}
if !skipMove && !fastStartup {
if maybeMoveAndRestart() == MoveCompleted {
return
}
}
// Check if another instance is already running
// On Windows, focus the existing instance; on other platforms, kill it
handleExistingInstance(startHidden)
// on macOS, offer the user to create a symlink
// from /usr/local/bin/ollama to the app bundle
installSymlink()
var ln net.Listener
if devMode {
// Use a fixed port in dev mode for predictable API access
ln, err = net.Listen("tcp", "127.0.0.1:3001")
} else {
ln, err = net.Listen("tcp", "127.0.0.1:0")
}
if err != nil {
slog.Error("failed to find available port", "error", err)
return
}
port := ln.Addr().(*net.TCPAddr).Port
token := uuid.NewString()
wv.port = port
wv.token = token
uiServerPort = port
st := &store.Store{}
// Enable CORS in development mode
if devMode {
os.Setenv("OLLAMA_CORS", "1")
// Check if Vite dev server is running on port 5173
var conn net.Conn
var err error
for _, addr := range []string{"127.0.0.1:5173", "localhost:5173"} {
conn, err = net.DialTimeout("tcp", addr, 2*time.Second)
if err == nil {
conn.Close()
break
}
}
if err != nil {
slog.Error("Vite dev server not running on port 5173")
fmt.Fprintln(os.Stderr, "Error: Vite dev server is not running on port 5173")
fmt.Fprintln(os.Stderr, "Please run 'npm run dev' in the ui/app directory to start the UI in development mode")
os.Exit(1)
}
}
// Initialize tools registry
toolRegistry := tools.NewRegistry()
slog.Info("initialized tools registry", "tool_count", len(toolRegistry.List()))
// ctx is the app-level context that will be used to stop the app
ctx, cancel := context.WithCancel(context.Background())
// octx is the ollama server context that will be used to stop the ollama server
octx, ocancel := context.WithCancel(ctx)
// TODO (jmorganca): instead we should instantiate the
// webview with the store instead of assigning it here, however
// making the webview a global variable is easier for now
wv.Store = st
done := make(chan error, 1)
osrv := server.New(st, devMode)
go func() {
slog.Info("starting ollama server")
done <- osrv.Run(octx)
}()
uiServer := ui.Server{
Token: token,
Restart: func() {
ocancel()
<-done
octx, ocancel = context.WithCancel(ctx)
go func() {
done <- osrv.Run(octx)
}()
},
Store: st,
ToolRegistry: toolRegistry,
Dev: devMode,
Logger: slog.Default(),
}
srv := &http.Server{
Handler: uiServer.Handler(),
}
// Start the UI server
slog.Info("starting ui server", "port", port)
go func() {
slog.Debug("starting ui server on port", "port", port)
err = srv.Serve(ln)
if err != nil && !errors.Is(err, http.ErrServerClosed) {
slog.Warn("desktop server", "error", err)
}
slog.Debug("background desktop server done")
}()
updater := &updater.Updater{Store: st}
updater.StartBackgroundUpdaterChecker(ctx, UpdateAvailable)
hasCompletedFirstRun, err := st.HasCompletedFirstRun()
if err != nil {
slog.Error("failed to load has completed first run", "error", err)
}
if !hasCompletedFirstRun {
err = st.SetHasCompletedFirstRun(true)
if err != nil {
slog.Error("failed to set has completed first run", "error", err)
}
}
// capture SIGINT and SIGTERM signals and gracefully shutdown the app
signals := make(chan os.Signal, 1)
signal.Notify(signals, syscall.SIGINT, syscall.SIGTERM)
go func() {
<-signals
slog.Info("received SIGINT or SIGTERM signal, shutting down")
quit()
}()
if urlSchemeRequest != "" {
go func() {
handleURLSchemeInCurrentInstance(urlSchemeRequest)
}()
} else {
slog.Debug("no URL scheme request to handle")
}
go func() {
slog.Debug("waiting for ollama server to be ready")
if err := ui.WaitForServer(ctx, 10*time.Second); err != nil {
slog.Warn("ollama server not ready, continuing anyway", "error", err)
}
if _, err := uiServer.UserData(ctx); err != nil {
slog.Warn("failed to load user data", "error", err)
}
}()
osRun(cancel, hasCompletedFirstRun, startHidden)
slog.Info("shutting down desktop server")
if err := srv.Close(); err != nil {
slog.Warn("error shutting down desktop server", "error", err)
}
slog.Info("shutting down ollama server")
cancel()
<-done
}
func startHiddenTasks() {
// If an upgrade is ready and we're in hidden mode, perform it at startup.
// If we're not in hidden mode, we want to start as fast as possible and not
// slow the user down with an upgrade.
if updater.IsUpdatePending() {
if fastStartup {
// CLI triggered app startup use-case
slog.Info("deferring pending update for fast startup")
} else {
if err := updater.DoUpgradeAtStartup(); err != nil {
slog.Info("unable to perform upgrade at startup", "error", err)
// Make sure the restart to upgrade menu shows so we can attempt an interactive upgrade to get authorization
UpdateAvailable("")
} else {
slog.Debug("launching new version...")
// TODO - consider a timer that aborts if this takes too long and we haven't been killed yet...
LaunchNewApp()
os.Exit(0)
}
}
}
}
func checkUserLoggedIn(uiServerPort int) bool {
if uiServerPort == 0 {
slog.Debug("UI server not ready yet, skipping auth check")
return false
}
resp, err := http.Post(fmt.Sprintf("http://127.0.0.1:%d/api/me", uiServerPort), "application/json", nil)
if err != nil {
slog.Debug("failed to call local auth endpoint", "error", err)
return false
}
defer resp.Body.Close()
// Check if the response is successful
if resp.StatusCode != http.StatusOK {
slog.Debug("auth endpoint returned non-OK status", "status", resp.StatusCode)
return false
}
var user struct {
ID string `json:"id"`
Name string `json:"name"`
}
if err := json.NewDecoder(resp.Body).Decode(&user); err != nil {
slog.Debug("failed to parse user response", "error", err)
return false
}
// Verify we have a valid user with an ID and name
if user.ID == "" || user.Name == "" {
slog.Debug("user response missing required fields", "id", user.ID, "name", user.Name)
return false
}
slog.Debug("user is logged in", "user_id", user.ID, "user_name", user.Name)
return true
}
// handleConnectURLScheme fetches the connect URL and opens it in the browser
func handleConnectURLScheme() {
if checkUserLoggedIn(uiServerPort) {
slog.Info("user is already logged in, opening app instead")
showWindow(wv.webview.Window())
return
}
connectURL, err := auth.BuildConnectURL("https://ollama.com")
if err != nil {
slog.Error("failed to build connect URL", "error", err)
openInBrowser("https://ollama.com/connect")
return
}
openInBrowser(connectURL)
}
// openInBrowser opens the specified URL in the default browser
func openInBrowser(url string) {
var cmd string
var args []string
switch runtime.GOOS {
case "windows":
cmd = "rundll32"
args = []string{"url.dll,FileProtocolHandler", url}
case "darwin":
cmd = "open"
args = []string{url}
default: // "linux", "freebsd", "openbsd", "netbsd"... should not reach here
slog.Warn("unsupported OS for openInBrowser", "os", runtime.GOOS)
}
slog.Info("executing browser command", "cmd", cmd, "args", args)
if err := exec.Command(cmd, args...).Start(); err != nil {
slog.Error("failed to open URL in browser", "url", url, "cmd", cmd, "args", args, "error", err)
}
}
// parseURLScheme parses an ollama:// URL and validates it
// Supports: ollama:// (open app) and ollama://connect (OAuth)
func parseURLScheme(urlSchemeRequest string) (isConnect bool, err error) {
parsedURL, err := url.Parse(urlSchemeRequest)
if err != nil {
return false, fmt.Errorf("invalid URL: %w", err)
}
// Check if this is a connect URL
if parsedURL.Host == "connect" || strings.TrimPrefix(parsedURL.Path, "/") == "connect" {
return true, nil
}
// Allow bare ollama:// or ollama:/// to open the app
if (parsedURL.Host == "" && parsedURL.Path == "") || parsedURL.Path == "/" {
return false, nil
}
return false, fmt.Errorf("unsupported ollama:// URL path: %s", urlSchemeRequest)
}
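// For reference, the forms the parser above accepts:
//
//	ollama://          -> open the app
//	ollama:///         -> open the app
//	ollama://connect   -> start the connect flow
//	ollama:///connect  -> start the connect flow
//	anything else      -> unsupported path error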
// handleURLSchemeInCurrentInstance processes URL scheme requests in the current instance
func handleURLSchemeInCurrentInstance(urlSchemeRequest string) {
isConnect, err := parseURLScheme(urlSchemeRequest)
if err != nil {
slog.Error("failed to parse URL scheme request", "url", urlSchemeRequest, "error", err)
return
}
if isConnect {
handleConnectURLScheme()
} else {
if wv.webview != nil {
showWindow(wv.webview.Window())
}
}
}

View File

@@ -1,269 +0,0 @@
//go:build windows || darwin
package main
// #cgo CFLAGS: -x objective-c
// #cgo LDFLAGS: -framework Webkit -framework Cocoa -framework LocalAuthentication -framework ServiceManagement
// #include "app_darwin.h"
// #include "../../updater/updater_darwin.h"
// typedef const char cchar_t;
import "C"
import (
"log/slog"
"os"
"os/exec"
"path/filepath"
"strings"
"time"
"unsafe"
"github.com/ollama/ollama/app/updater"
"github.com/ollama/ollama/app/version"
)
var ollamaPath = func() string {
if updater.BundlePath != "" {
return filepath.Join(updater.BundlePath, "Contents", "Resources", "ollama")
}
pwd, err := os.Getwd()
if err != nil {
slog.Warn("failed to get pwd", "error", err)
return ""
}
return filepath.Join(pwd, "ollama")
}()
var (
isApp = updater.BundlePath != ""
appLogPath = filepath.Join(os.Getenv("HOME"), ".ollama", "logs", "app.log")
launchAgentPath = filepath.Join(os.Getenv("HOME"), "Library", "LaunchAgents", "com.ollama.ollama.plist")
)
// TODO(jmorganca): pre-create the window and pass
// it to the webview instead of using the internal one
//
//export StartUI
func StartUI(path *C.cchar_t) {
p := C.GoString(path)
wv.Run(p)
styleWindow(wv.webview.Window())
C.setWindowDelegate(wv.webview.Window())
}
//export ShowUI
func ShowUI() {
// If webview is already running, just show the window
if wv.IsRunning() && wv.webview != nil {
showWindow(wv.webview.Window())
} else {
root := C.CString("/")
defer C.free(unsafe.Pointer(root))
StartUI(root)
}
}
//export StopUI
func StopUI() {
wv.Terminate()
}
//export StartUpdate
func StartUpdate() {
if err := updater.DoUpgrade(true); err != nil {
slog.Error("upgrade failed", "error", err)
return
}
slog.Debug("launching new version...")
// TODO - consider a timer that aborts if this takes too long and we haven't been killed yet...
LaunchNewApp()
// not reached if upgrade works, the new app will kill this process
}
//export darwinStartHiddenTasks
func darwinStartHiddenTasks() {
startHiddenTasks()
}
func init() {
// Temporary code to mimic Squirrel ShipIt behavior
if len(os.Args) > 2 {
if os.Args[1] == "___launch___" {
path := strings.TrimPrefix(os.Args[2], "file://")
slog.Info("Ollama binary called as ShipIt - launching", "app", path)
appName := C.CString(path)
defer C.free(unsafe.Pointer(appName))
C.launchApp(appName)
slog.Info("other instance has been launched")
time.Sleep(5 * time.Second)
slog.Info("exiting with zero status")
os.Exit(0)
}
}
}
// maybeMoveAndRestart checks if we should relocate
// and returns true if we did and should immediately exit
func maybeMoveAndRestart() appMove {
if updater.BundlePath == "" {
// Typically developer mode with 'go run ./cmd/app'
return CannotMove
}
// Respect the user's intent if they chose "keep" vs. "replace" when dragging to Applications
if strings.HasPrefix(updater.BundlePath, strings.TrimSuffix(updater.SystemWidePath, filepath.Ext(updater.SystemWidePath))) {
return AlreadyMoved
}
// Ask to move to applications directory
status := (appMove)(C.askToMoveToApplications())
if status == MoveCompleted {
// Double check
if _, err := os.Stat(updater.SystemWidePath); err != nil {
slog.Warn("stat failure after move", "path", updater.SystemWidePath, "error", err)
return MoveError
}
}
return status
}
// handleExistingInstance handles existing instances on macOS
func handleExistingInstance(_ bool) {
C.killOtherInstances()
}
func installSymlink() {
if !isApp {
return
}
cliPath := C.CString(ollamaPath)
defer C.free(unsafe.Pointer(cliPath))
// Check the users path first
cmd, _ := exec.LookPath("ollama")
if cmd != "" {
resolved, err := os.Readlink(cmd)
if err == nil {
tmp, err := filepath.Abs(resolved)
if err == nil {
resolved = tmp
}
} else {
resolved = cmd
}
if resolved == ollamaPath {
slog.Info("ollama already in users PATH", "cli", cmd)
return
}
}
code := C.installSymlink(cliPath)
if code != 0 {
slog.Error("Failed to install symlink")
}
}
func UpdateAvailable(ver string) error {
slog.Debug("update detected, adjusting menu")
// TODO (jmorganca): find a better check for development mode than checking the bundle path
if updater.BundlePath != "" {
C.updateAvailable()
}
return nil
}
func osRun(_ func(), hasCompletedFirstRun, startHidden bool) {
registerLaunchAgent(hasCompletedFirstRun)
// Run the native macOS app
// Note: this will block until the app is closed
slog.Debug("starting native darwin event loop")
C.run(C._Bool(hasCompletedFirstRun), C._Bool(startHidden))
}
func quit() {
C.quit()
}
func LaunchNewApp() {
appName := C.CString(updater.BundlePath)
defer C.free(unsafe.Pointer(appName))
C.launchApp(appName)
}
// Send a request to the main app thread to load a UI page
func sendUIRequestMessage(path string) {
p := C.CString(path)
defer C.free(unsafe.Pointer(p))
C.uiRequest(p)
}
func registerLaunchAgent(hasCompletedFirstRun bool) {
// Remove any stale Login Item registrations
C.unregisterSelfFromLoginItem()
C.registerSelfAsLoginItem(C._Bool(hasCompletedFirstRun))
}
func logStartup() {
appPath := updater.BundlePath
if appPath == updater.SystemWidePath {
// Detect sandboxed scenario
exe, err := os.Executable()
if err == nil {
p := filepath.Dir(exe)
if filepath.Base(p) == "MacOS" {
p = filepath.Dir(filepath.Dir(p))
if p != appPath {
slog.Info("starting sandboxed Ollama", "app", appPath, "sandbox", p)
return
}
}
}
}
slog.Info("starting Ollama", "app", appPath, "version", version.Version, "OS", updater.UserAgentOS)
}
func hideWindow(ptr unsafe.Pointer) {
C.hideWindow(C.uintptr_t(uintptr(ptr)))
}
func showWindow(ptr unsafe.Pointer) {
C.showWindow(C.uintptr_t(uintptr(ptr)))
}
func styleWindow(ptr unsafe.Pointer) {
C.styleWindow(C.uintptr_t(uintptr(ptr)))
}
func runInBackground() {
cmd := exec.Command(filepath.Join(updater.BundlePath, "Contents", "MacOS", "Ollama"), "hidden")
if cmd != nil {
err := cmd.Run()
if err != nil {
slog.Error("failed to run Ollama", "bundlePath", updater.BundlePath, "error", err)
os.Exit(1)
}
} else {
slog.Error("failed to start Ollama in background", "bundlePath", updater.BundlePath)
os.Exit(1)
}
}
func drag(ptr unsafe.Pointer) {
C.drag(C.uintptr_t(uintptr(ptr)))
}
func doubleClick(ptr unsafe.Pointer) {
C.doubleClick(C.uintptr_t(uintptr(ptr)))
}
//export handleConnectURL
func handleConnectURL() {
handleConnectURLScheme()
}
// checkAndHandleExistingInstance is not needed on non-Windows platforms
func checkAndHandleExistingInstance(_ string) bool {
return false
}

View File

@@ -1,43 +0,0 @@
#import <Cocoa/Cocoa.h>
#import <Security/Security.h>
@interface AppDelegate : NSObject <NSApplicationDelegate>
- (void)applicationDidFinishLaunching:(NSNotification *)aNotification;
@end
enum AppMove
{
CannotMove,
UserDeclinedMove,
MoveCompleted,
AlreadyMoved,
LoginSession,
PermissionDenied,
MoveError,
};
void run(bool firstTimeRun, bool startHidden);
void killOtherInstances();
enum AppMove askToMoveToApplications();
int createSymlinkWithAuthorization();
int installSymlink(const char *cliPath);
extern void Restart();
// extern void Quit();
void StartUI(const char *path);
void ShowUI();
void StopUI();
void StartUpdate();
void darwinStartHiddenTasks();
void launchApp(const char *appPath);
void updateAvailable();
void quit();
void uiRequest(char *path);
void registerSelfAsLoginItem(bool firstTimeRun);
void unregisterSelfFromLoginItem();
void setWindowDelegate(void *window);
void showWindow(uintptr_t wndPtr);
void hideWindow(uintptr_t wndPtr);
void styleWindow(uintptr_t wndPtr);
void drag(uintptr_t wndPtr);
void doubleClick(uintptr_t wndPtr);
void handleConnectURL();

View File

File diff suppressed because it is too large

View File

@@ -1,441 +0,0 @@
//go:build windows || darwin
package main
import (
"errors"
"fmt"
"io"
"log"
"log/slog"
"os"
"os/exec"
"os/signal"
"path/filepath"
"runtime"
"strings"
"syscall"
"unsafe"
"github.com/ollama/ollama/app/updater"
"github.com/ollama/ollama/app/version"
"github.com/ollama/ollama/app/wintray"
"golang.org/x/sys/windows"
)
var (
u32 = windows.NewLazySystemDLL("User32.dll")
pBringWindowToTop = u32.NewProc("BringWindowToTop")
pShowWindow = u32.NewProc("ShowWindow")
pSendMessage = u32.NewProc("SendMessageA")
pGetSystemMetrics = u32.NewProc("GetSystemMetrics")
pGetWindowRect = u32.NewProc("GetWindowRect")
pSetWindowPos = u32.NewProc("SetWindowPos")
pSetForegroundWindow = u32.NewProc("SetForegroundWindow")
pSetActiveWindow = u32.NewProc("SetActiveWindow")
pIsIconic = u32.NewProc("IsIconic")
appPath = filepath.Join(os.Getenv("LOCALAPPDATA"), "Programs", "Ollama")
appLogPath = filepath.Join(os.Getenv("LOCALAPPDATA"), "Ollama", "app.log")
startupShortcut = filepath.Join(os.Getenv("APPDATA"), "Microsoft", "Windows", "Start Menu", "Programs", "Startup", "Ollama.lnk")
ollamaPath string
DesktopAppName = "ollama app.exe"
)
func init() {
// With alternate install location use executable location
exe, err := os.Executable()
if err != nil {
slog.Warn("error discovering executable directory", "error", err)
} else {
appPath = filepath.Dir(exe)
}
ollamaPath = filepath.Join(appPath, "ollama.exe")
// Handle developer mode (go run ./cmd/app)
if _, err := os.Stat(ollamaPath); err != nil {
pwd, err := os.Getwd()
if err != nil {
slog.Warn("missing ollama.exe and failed to get pwd", "error", err)
return
}
distAppPath := filepath.Join(pwd, "dist", "windows-"+runtime.GOARCH)
distOllamaPath := filepath.Join(distAppPath, "ollama.exe")
if _, err := os.Stat(distOllamaPath); err == nil {
slog.Info("detected developer mode")
appPath = distAppPath
ollamaPath = distOllamaPath
}
}
}
func maybeMoveAndRestart() appMove {
return 0
}
// handleExistingInstance checks for existing instances and optionally focuses them
func handleExistingInstance(startHidden bool) {
if wintray.CheckAndFocusExistingInstance(!startHidden) {
slog.Info("existing instance found, exiting")
os.Exit(0)
}
}
func installSymlink() {}
type appCallbacks struct {
t wintray.TrayCallbacks
shutdown func()
}
var app = &appCallbacks{}
func (ac *appCallbacks) UIRun(path string) {
wv.Run(path)
}
func (*appCallbacks) UIShow() {
if wv.webview != nil {
showWindow(wv.webview.Window())
} else {
wv.Run("/")
}
}
func (*appCallbacks) UITerminate() {
wv.Terminate()
}
func (*appCallbacks) UIRunning() bool {
return wv.IsRunning()
}
func (app *appCallbacks) Quit() {
app.t.Quit()
wv.Terminate()
}
// TODO - reconcile with above for consistency between mac/windows
func quit() {
wv.Terminate()
}
func (app *appCallbacks) DoUpdate() {
// Safeguard in case we have requests in flight that need to drain...
slog.Info("Waiting for server to shutdown")
app.shutdown()
if err := updater.DoUpgrade(true); err != nil {
slog.Warn(fmt.Sprintf("upgrade attempt failed: %s", err))
}
}
// HandleURLScheme implements the URLSchemeHandler interface
func (app *appCallbacks) HandleURLScheme(urlScheme string) {
handleURLSchemeRequest(urlScheme)
}
// handleURLSchemeRequest processes URL scheme requests from other instances
func handleURLSchemeRequest(urlScheme string) {
isConnect, err := parseURLScheme(urlScheme)
if err != nil {
slog.Error("failed to parse URL scheme request", "url", urlScheme, "error", err)
return
}
if isConnect {
handleConnectURLScheme()
} else {
if wv.webview != nil {
showWindow(wv.webview.Window())
}
}
}
func UpdateAvailable(ver string) error {
return app.t.UpdateAvailable(ver)
}
func osRun(shutdown func(), hasCompletedFirstRun, startHidden bool) {
var err error
app.shutdown = shutdown
app.t, err = wintray.NewTray(app)
if err != nil {
log.Fatalf("Failed to start: %s", err)
}
signals := make(chan os.Signal, 1)
signal.Notify(signals, syscall.SIGINT, syscall.SIGTERM)
// TODO - can this be generalized?
go func() {
<-signals
slog.Debug("shutting down due to signal")
app.t.Quit()
wv.Terminate()
}()
// On windows, we run the final tasks in the main thread
// before starting the tray event loop. These final tasks
// may trigger the UI, and must do that from the main thread.
if !startHidden {
// Determine if the process was started from a shortcut
// ~\AppData\Roaming\Microsoft\Windows\Start Menu\Programs\Startup\Ollama
const STARTF_TITLEISLINKNAME = 0x00000800
var info windows.StartupInfo
if err := windows.GetStartupInfo(&info); err != nil {
slog.Debug("unable to retrieve startup info", "error", err)
} else if info.Flags&STARTF_TITLEISLINKNAME == STARTF_TITLEISLINKNAME {
linkPath := windows.UTF16PtrToString(info.Title)
if strings.Contains(linkPath, "Startup") {
startHidden = true
}
}
}
if startHidden {
startHiddenTasks()
} else {
ptr := wv.Run("/")
// Set the window icon using the tray icon
if ptr != nil {
iconHandle := app.t.GetIconHandle()
if iconHandle != 0 {
hwnd := uintptr(ptr)
const ICON_SMALL = 0
const ICON_BIG = 1
const WM_SETICON = 0x0080
pSendMessage.Call(hwnd, uintptr(WM_SETICON), uintptr(ICON_SMALL), uintptr(iconHandle))
pSendMessage.Call(hwnd, uintptr(WM_SETICON), uintptr(ICON_BIG), uintptr(iconHandle))
}
}
centerWindow(ptr)
}
if !hasCompletedFirstRun {
// Only create the login shortcut on first start
// so we can respect the user's deletion of the link
err = createLoginShortcut()
if err != nil {
slog.Warn("unable to create login shortcut", "error", err)
}
}
app.t.TrayRun() // This will block the main thread
}
func createLoginShortcut() error {
// The installer lays down a shortcut for us so we can copy it without
// having to resort to calling COM APIs to establish the shortcut
shortcutOrigin := filepath.Join(appPath, "lib", "Ollama.lnk")
_, err := os.Stat(startupShortcut)
if err != nil {
if errors.Is(err, os.ErrNotExist) {
in, err := os.Open(shortcutOrigin)
if err != nil {
return fmt.Errorf("unable to open shortcut %s : %w", shortcutOrigin, err)
}
defer in.Close()
out, err := os.Create(startupShortcut)
if err != nil {
return fmt.Errorf("unable to open startup link %s : %w", startupShortcut, err)
}
defer out.Close()
_, err = io.Copy(out, in)
if err != nil {
return fmt.Errorf("unable to copy shortcut %s : %w", startupShortcut, err)
}
err = out.Sync()
if err != nil {
return fmt.Errorf("unable to sync shortcut %s : %w", startupShortcut, err)
}
slog.Info("Created Startup shortcut", "shortcut", startupShortcut)
} else {
slog.Warn("unexpected error looking up Startup shortcut", "error", err)
}
} else {
slog.Debug("Startup link already exists", "shortcut", startupShortcut)
}
return nil
}
// Send a request to the main app thread to load a UI page
func sendUIRequestMessage(path string) {
wintray.SendUIRequestMessage(path)
}
func LaunchNewApp() {
}
func logStartup() {
slog.Info("starting Ollama", "app", appPath, "version", version.Version, "OS", updater.UserAgentOS)
}
const (
SW_HIDE = 0 // Hides the window
SW_SHOW = 5 // Shows window in its current size/position
SW_SHOWNA = 8 // Shows without activating
SW_MINIMIZE = 6 // Minimizes the window
SW_RESTORE = 9 // Restores to previous size/position
SW_SHOWDEFAULT = 10 // Sets show state based on program state
SM_CXSCREEN = 0
SM_CYSCREEN = 1
HWND_TOP = 0
SWP_NOSIZE = 0x0001
SWP_NOMOVE = 0x0002
SWP_NOZORDER = 0x0004
SWP_SHOWWINDOW = 0x0040
// Menu constants
MF_STRING = 0x00000000
MF_SEPARATOR = 0x00000800
MF_GRAYED = 0x00000001
TPM_RETURNCMD = 0x0100
)
// POINT structure for cursor position
type POINT struct {
X int32
Y int32
}
// Rect structure for GetWindowRect
type Rect struct {
Left int32
Top int32
Right int32
Bottom int32
}
func centerWindow(ptr unsafe.Pointer) {
hwnd := uintptr(ptr)
if hwnd == 0 {
return
}
var rect Rect
pGetWindowRect.Call(hwnd, uintptr(unsafe.Pointer(&rect)))
screenWidth, _, _ := pGetSystemMetrics.Call(uintptr(SM_CXSCREEN))
screenHeight, _, _ := pGetSystemMetrics.Call(uintptr(SM_CYSCREEN))
windowWidth := rect.Right - rect.Left
windowHeight := rect.Bottom - rect.Top
x := (int32(screenWidth) - windowWidth) / 2
y := (int32(screenHeight) - windowHeight) / 2
// Ensure the window is not positioned off-screen
if x < 0 {
x = 0
}
if y < 0 {
y = 0
}
pSetWindowPos.Call(
hwnd,
uintptr(HWND_TOP),
uintptr(x),
uintptr(y),
uintptr(windowWidth), // Keep original width
uintptr(windowHeight), // Keep original height
uintptr(SWP_SHOWWINDOW),
)
}
func showWindow(ptr unsafe.Pointer) {
hwnd := uintptr(ptr)
if hwnd != 0 {
iconHandle := app.t.GetIconHandle()
if iconHandle != 0 {
const ICON_SMALL = 0
const ICON_BIG = 1
const WM_SETICON = 0x0080
pSendMessage.Call(hwnd, uintptr(WM_SETICON), uintptr(ICON_SMALL), uintptr(iconHandle))
pSendMessage.Call(hwnd, uintptr(WM_SETICON), uintptr(ICON_BIG), uintptr(iconHandle))
}
// Check if window is minimized
isMinimized, _, _ := pIsIconic.Call(hwnd)
if isMinimized != 0 {
// Restore the window if it's minimized
pShowWindow.Call(hwnd, uintptr(SW_RESTORE))
}
// Show the window
pShowWindow.Call(hwnd, uintptr(SW_SHOW))
// Bring window to top
pBringWindowToTop.Call(hwnd)
// Force window to foreground
pSetForegroundWindow.Call(hwnd)
// Make it the active window
pSetActiveWindow.Call(hwnd)
// Ensure window is positioned on top
pSetWindowPos.Call(
hwnd,
uintptr(HWND_TOP),
0, 0, 0, 0,
uintptr(SWP_NOSIZE|SWP_NOMOVE|SWP_SHOWWINDOW),
)
}
}
// hideWindow hides the application window
func hideWindow(ptr unsafe.Pointer) {
hwnd := uintptr(ptr)
if hwnd != 0 {
pShowWindow.Call(
hwnd,
uintptr(SW_HIDE),
)
}
}
func runInBackground() {
exe, err := os.Executable()
if err != nil {
slog.Error("failed to get executable path", "error", err)
os.Exit(1)
}
// exec.Command never returns nil, so the relaunched copy can be run directly
cmd := exec.Command(exe, "hidden")
if err = cmd.Run(); err != nil {
slog.Error("failed to run Ollama", "exe", exe, "error", err)
os.Exit(1)
}
}
func drag(ptr unsafe.Pointer) {}
func doubleClick(ptr unsafe.Pointer) {}
// checkAndHandleExistingInstance checks if another instance is running and sends the URL to it
func checkAndHandleExistingInstance(urlSchemeRequest string) bool {
if urlSchemeRequest == "" {
return false
}
// Try to send URL to existing instance using wintray messaging
if wintray.CheckAndSendToExistingInstance(urlSchemeRequest) {
os.Exit(0)
return true
}
// No existing instance, we'll handle it ourselves
return false
}

View File

@@ -1,27 +0,0 @@
#ifndef MENU_H
#define MENU_H
#ifdef __cplusplus
extern "C" {
#endif
typedef struct
{
char *label;
int enabled;
int separator;
} menuItem;
// TODO (jmorganca): these need to be forward declared in the webview.h file
// for now but ideally they should be in this header file on windows too
#ifndef WIN32
int menu_get_item_count();
void *menu_get_items();
void menu_handle_selection(char *item);
#endif
#ifdef __cplusplus
}
#endif
#endif

View File

@@ -1,528 +0,0 @@
//go:build windows || darwin
package main
// #include "menu.h"
import "C"
import (
"encoding/base64"
"encoding/json"
"fmt"
"log/slog"
"net/http"
"os"
"path/filepath"
"runtime"
"strings"
"sync"
"time"
"unsafe"
"github.com/ollama/ollama/app/dialog"
"github.com/ollama/ollama/app/store"
"github.com/ollama/ollama/app/webview"
)
type Webview struct {
port int
token string
webview webview.WebView
mutex sync.Mutex
Store *store.Store
}
// Run initializes the webview and starts its event loop.
// Note: this must be called from the primary app thread
// This returns the OS native window handle to the caller
func (w *Webview) Run(path string) unsafe.Pointer {
var url string
if devMode {
// In development mode, use the local dev server
url = fmt.Sprintf("http://localhost:5173%s", path)
} else {
url = fmt.Sprintf("http://127.0.0.1:%d%s", w.port, path)
}
w.mutex.Lock()
defer w.mutex.Unlock()
if w.webview == nil {
// Note: turning on debug on macOS throws errors but is marginally functional for debugging
// TODO (jmorganca): we should pre-create the window and then provide it here to
// webview so we can hide it from the start and make other modifications
wv := webview.New(debug)
// start the window hidden
hideWindow(wv.Window())
wv.SetTitle("Ollama")
// TODO (jmorganca): this isn't working yet since it needs to be set
// on the first page load, ideally in an interstitial page like `/token`
// that exists only to set the cookie and redirect to /
// wv.Init(fmt.Sprintf(`document.cookie = "token=%s; path=/"`, w.token))
init := `
// Disable reload
document.addEventListener('keydown', function(e) {
if ((e.ctrlKey || e.metaKey) && e.key === 'r') {
e.preventDefault();
return false;
}
});
// Prevent back/forward navigation
window.addEventListener('popstate', function(e) {
e.preventDefault();
history.pushState(null, '', window.location.pathname);
return false;
});
// Clear history on load
window.addEventListener('load', function() {
history.pushState(null, '', window.location.pathname);
window.history.replaceState(null, '', window.location.pathname);
});
// Set token cookie
document.cookie = "token=` + w.token + `; path=/";
`
// Windows-specific scrollbar styling
if runtime.GOOS == "windows" {
init += `
// Fix scrollbar styling for Edge WebView2 on Windows only
function updateScrollbarStyles() {
const isDark = window.matchMedia('(prefers-color-scheme: dark)').matches;
const existingStyle = document.getElementById('scrollbar-style');
if (existingStyle) existingStyle.remove();
const style = document.createElement('style');
style.id = 'scrollbar-style';
if (isDark) {
style.textContent = ` + "`" + `
::-webkit-scrollbar { width: 6px !important; height: 6px !important; }
::-webkit-scrollbar-track { background: #1a1a1a !important; }
::-webkit-scrollbar-thumb { background: #404040 !important; border-radius: 6px !important; }
::-webkit-scrollbar-thumb:hover { background: #505050 !important; }
::-webkit-scrollbar-corner { background: #1a1a1a !important; }
::-webkit-scrollbar-button {
background: transparent !important;
border: none !important;
width: 0px !important;
height: 0px !important;
margin: 0 !important;
padding: 0 !important;
}
::-webkit-scrollbar-button:vertical:start:decrement {
background: transparent !important;
height: 0px !important;
}
::-webkit-scrollbar-button:vertical:end:increment {
background: transparent !important;
height: 0px !important;
}
::-webkit-scrollbar-button:horizontal:start:decrement {
background: transparent !important;
width: 0px !important;
}
::-webkit-scrollbar-button:horizontal:end:increment {
background: transparent !important;
width: 0px !important;
}
` + "`" + `;
} else {
style.textContent = ` + "`" + `
::-webkit-scrollbar { width: 6px !important; height: 6px !important; }
::-webkit-scrollbar-track { background: #f0f0f0 !important; }
::-webkit-scrollbar-thumb { background: #c0c0c0 !important; border-radius: 6px !important; }
::-webkit-scrollbar-thumb:hover { background: #a0a0a0 !important; }
::-webkit-scrollbar-corner { background: #f0f0f0 !important; }
::-webkit-scrollbar-button {
background: transparent !important;
border: none !important;
width: 0px !important;
height: 0px !important;
margin: 0 !important;
padding: 0 !important;
}
::-webkit-scrollbar-button:vertical:start:decrement {
background: transparent !important;
height: 0px !important;
}
::-webkit-scrollbar-button:vertical:end:increment {
background: transparent !important;
height: 0px !important;
}
::-webkit-scrollbar-button:horizontal:start:decrement {
background: transparent !important;
width: 0px !important;
}
::-webkit-scrollbar-button:horizontal:end:increment {
background: transparent !important;
width: 0px !important;
}
` + "`" + `;
}
document.head.appendChild(style);
}
window.addEventListener('load', updateScrollbarStyles);
window.matchMedia('(prefers-color-scheme: dark)').addEventListener('change', updateScrollbarStyles);
`
}
// On Windows, make Ctrl+N open a new chat.
// TODO (jmorganca): later we should use proper accelerators
// once we introduce a native menu for the window.
// This is only needed on Windows since macOS uses the proper accelerators.
if runtime.GOOS == "windows" {
init += `
document.addEventListener('keydown', function(e) {
if ((e.ctrlKey || e.metaKey) && e.key === 'n') {
e.preventDefault();
// Use the existing navigation method
history.pushState({}, '', '/c/new');
window.dispatchEvent(new PopStateEvent('popstate'));
return false;
}
});
`
}
init += `
window.OLLAMA_WEBSEARCH = true;
`
wv.Init(init)
// Add keyboard handler for zoom
wv.Init(`
window.addEventListener('keydown', function(e) {
// CMD/Ctrl + Plus/Equals (zoom in)
if ((e.metaKey || e.ctrlKey) && (e.key === '+' || e.key === '=')) {
e.preventDefault();
window.zoomIn && window.zoomIn();
return false;
}
// CMD/Ctrl + Minus (zoom out)
if ((e.metaKey || e.ctrlKey) && e.key === '-') {
e.preventDefault();
window.zoomOut && window.zoomOut();
return false;
}
// CMD/Ctrl + 0 (reset zoom)
if ((e.metaKey || e.ctrlKey) && e.key === '0') {
e.preventDefault();
window.zoomReset && window.zoomReset();
return false;
}
}, true);
`)
wv.Bind("zoomIn", func() {
current := wv.GetZoom()
wv.SetZoom(current + 0.1)
})
wv.Bind("zoomOut", func() {
current := wv.GetZoom()
wv.SetZoom(current - 0.1)
})
wv.Bind("zoomReset", func() {
wv.SetZoom(1.0)
})
wv.Bind("ready", func() {
showWindow(wv.Window())
})
wv.Bind("close", func() {
hideWindow(wv.Window())
})
// Webviews do not allow access to the file system by default, so we need to
// bind file system operations here
wv.Bind("selectModelsDirectory", func() {
go func() {
// Helper function to call the JavaScript callback with data or null
callCallback := func(data interface{}) {
dataJSON, _ := json.Marshal(data)
wv.Dispatch(func() {
wv.Eval(fmt.Sprintf("window.__selectModelsDirectoryCallback && window.__selectModelsDirectoryCallback(%s)", dataJSON))
})
}
directory, err := dialog.Directory().Title("Select Model Directory").ShowHidden(true).Browse()
if err != nil {
slog.Debug("Directory selection cancelled or failed", "error", err)
callCallback(nil)
return
}
slog.Debug("Directory selected", "path", directory)
callCallback(directory)
}()
})
// Bind selectFiles function for selecting multiple files at once
wv.Bind("selectFiles", func() {
go func() {
// Helper function to call the JavaScript callback with data or null
callCallback := func(data interface{}) {
dataJSON, _ := json.Marshal(data)
wv.Dispatch(func() {
wv.Eval(fmt.Sprintf("window.__selectFilesCallback && window.__selectFilesCallback(%s)", dataJSON))
})
}
// Define allowed extensions for native dialog filtering
textExts := []string{
"pdf", "docx", "txt", "md", "csv", "json", "xml", "html", "htm",
"js", "jsx", "ts", "tsx", "py", "java", "cpp", "c", "cc", "h", "cs", "php", "rb",
"go", "rs", "swift", "kt", "scala", "sh", "bat", "yaml", "yml", "toml", "ini",
"cfg", "conf", "log", "rtf",
}
imageExts := []string{"png", "jpg", "jpeg", "webp"}
allowedExts := append(textExts, imageExts...)
// Use native multiple file selection with extension filtering
filenames, err := dialog.File().
Filter("Supported Files", allowedExts...).
Title("Select Files").
LoadMultiple()
if err != nil {
slog.Debug("Multiple file selection cancelled or failed", "error", err)
callCallback(nil)
return
}
if len(filenames) == 0 {
callCallback(nil)
return
}
var files []map[string]string
maxFileSize := int64(10 * 1024 * 1024) // 10MB
for _, filename := range filenames {
// Check file extension (double-check after native dialog filtering)
ext := strings.ToLower(strings.TrimPrefix(filepath.Ext(filename), "."))
validExt := false
for _, allowedExt := range allowedExts {
if ext == allowedExt {
validExt = true
break
}
}
if !validExt {
slog.Warn("file extension not allowed, skipping", "filename", filepath.Base(filename), "extension", ext)
continue
}
// Check file size before reading (pre-filter large files)
fileStat, err := os.Stat(filename)
if err != nil {
slog.Error("failed to get file info", "error", err, "filename", filename)
continue
}
if fileStat.Size() > maxFileSize {
slog.Warn("file too large, skipping", "filename", filepath.Base(filename), "size", fileStat.Size())
continue
}
fileBytes, err := os.ReadFile(filename)
if err != nil {
slog.Error("failed to read file", "error", err, "filename", filename)
continue
}
mimeType := http.DetectContentType(fileBytes)
dataURL := fmt.Sprintf("data:%s;base64,%s", mimeType, base64.StdEncoding.EncodeToString(fileBytes))
fileResult := map[string]string{
"filename": filepath.Base(filename),
"path": filename,
"dataURL": dataURL,
}
files = append(files, fileResult)
}
if len(files) == 0 {
callCallback(nil)
} else {
callCallback(files)
}
}()
})
wv.Bind("drag", func() {
wv.Dispatch(func() {
drag(wv.Window())
})
})
wv.Bind("doubleClick", func() {
wv.Dispatch(func() {
doubleClick(wv.Window())
})
})
// Add binding for working directory selection
wv.Bind("selectWorkingDirectory", func() {
go func() {
// Helper function to call the JavaScript callback with data or null
callCallback := func(data interface{}) {
dataJSON, _ := json.Marshal(data)
wv.Dispatch(func() {
wv.Eval(fmt.Sprintf("window.__selectWorkingDirectoryCallback && window.__selectWorkingDirectoryCallback(%s)", dataJSON))
})
}
directory, err := dialog.Directory().Title("Select Working Directory").ShowHidden(true).Browse()
if err != nil {
slog.Debug("Directory selection cancelled or failed", "error", err)
callCallback(nil)
return
}
slog.Debug("Directory selected", "path", directory)
callCallback(directory)
}()
})
wv.Bind("setContextMenuItems", func(items []map[string]interface{}) error {
menuMutex.Lock()
defer menuMutex.Unlock()
if len(menuItems) > 0 {
pinner.Unpin()
// Free the C strings allocated for the previous menu labels;
// otherwise every rebuild of the context menu leaks them.
for _, mi := range menuItems {
C.free(unsafe.Pointer(mi.label))
}
}
menuItems = nil
for _, item := range items {
menuItem := C.menuItem{
label: C.CString(item["label"].(string)),
enabled: 0,
separator: 0,
}
if item["enabled"] != nil {
menuItem.enabled = 1
}
if item["separator"] != nil {
menuItem.separator = 1
}
menuItems = append(menuItems, menuItem)
}
return nil
})
// Debounce resize events
var resizeTimer *time.Timer
var resizeMutex sync.Mutex
wv.Bind("resize", func(width, height int) {
if w.Store != nil {
resizeMutex.Lock()
if resizeTimer != nil {
resizeTimer.Stop()
}
resizeTimer = time.AfterFunc(100*time.Millisecond, func() {
err := w.Store.SetWindowSize(width, height)
if err != nil {
slog.Error("failed to set window size", "error", err)
}
})
resizeMutex.Unlock()
}
})
// On Darwin, we can't have two threads each running a global event loop,
// but on Windows the event loops are tied to the window, so we're
// able to run in both the tray and webview
if runtime.GOOS != "darwin" {
slog.Debug("starting webview event loop")
go func() {
wv.Run()
slog.Debug("webview event loop exited")
}()
}
if w.Store != nil {
width, height, err := w.Store.WindowSize()
if err != nil {
slog.Error("failed to get window size", "error", err)
}
if width > 0 && height > 0 {
wv.SetSize(width, height, webview.HintNone)
} else {
wv.SetSize(800, 600, webview.HintNone)
}
}
wv.SetSize(800, 600, webview.HintMin)
w.webview = wv
w.webview.Navigate(url)
} else {
w.webview.Eval(fmt.Sprintf(`
history.pushState({}, '', '%s');
`, path))
showWindow(w.webview.Window())
}
return w.webview.Window()
}
func (w *Webview) Terminate() {
w.mutex.Lock()
if w.webview == nil {
w.mutex.Unlock()
return
}
wv := w.webview
w.webview = nil
w.mutex.Unlock()
wv.Terminate()
wv.Destroy()
}
func (w *Webview) IsRunning() bool {
w.mutex.Lock()
defer w.mutex.Unlock()
return w.webview != nil
}
var (
menuItems []C.menuItem
menuMutex sync.RWMutex
pinner runtime.Pinner
)
//export menu_get_item_count
func menu_get_item_count() C.int {
menuMutex.RLock()
defer menuMutex.RUnlock()
return C.int(len(menuItems))
}
//export menu_get_items
func menu_get_items() unsafe.Pointer {
menuMutex.RLock()
defer menuMutex.RUnlock()
if len(menuItems) == 0 {
return nil
}
// Return pointer to the slice data
pinner.Pin(&menuItems[0])
return unsafe.Pointer(&menuItems[0])
}
//export menu_handle_selection
func menu_handle_selection(item *C.char) {
if wv.webview == nil {
// The window may already have been torn down; drop the selection.
return
}
wv.webview.Eval(fmt.Sprintf("window.handleContextMenuResult('%s')", C.GoString(item)))
}
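The Pin/Unpin calls around menuItems are what make it safe for the exported C entry points above to hold a pointer into a Go slice. A standalone sketch of that pinning discipline, with no cgo, for illustration only:

	package main

	import (
		"fmt"
		"runtime"
		"unsafe"
	)

	func main() {
		var pinner runtime.Pinner
		items := []int32{1, 2, 3}

		// Pin the backing array before handing its address to foreign code;
		// the runtime keeps the object fixed and reachable until Unpin.
		pinner.Pin(&items[0])
		p := unsafe.Pointer(&items[0])

		// Foreign code would read through p here.
		fmt.Println(*(*int32)(p))

		// Unpin releases everything pinned via this Pinner, mirroring the
		// Unpin call before menuItems is rebuilt above.
		pinner.Unpin()
	}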

View File

@@ -1,40 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>English</string>
<key>CFBundleExecutable</key>
<string>Squirrel</string>
<key>CFBundleIconFile</key>
<string/>
<key>CFBundleIdentifier</key>
<string>com.github.Squirrel</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>Squirrel</string>
<key>CFBundlePackageType</key>
<string>FMWK</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>1</string>
<key>DTCompiler</key>
<string>com.apple.compilers.llvm.clang.1_0</string>
<key>DTSDKBuild</key>
<string>22E245</string>
<key>DTSDKName</key>
<string>macosx13.3</string>
<key>DTXcode</key>
<string>1431</string>
<key>DTXcodeBuild</key>
<string>14E300c</string>
<key>NSHumanReadableCopyright</key>
<string>Copyright © 2013 GitHub. All rights reserved.</string>
<key>NSPrincipalClass</key>
<string/>
</dict>
</plist>

View File

@@ -1,51 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDisplayName</key>
<string>Ollama</string>
<key>CFBundleExecutable</key>
<string>Ollama</string>
<key>CFBundleIconFile</key>
<string>icon.icns</string>
<key>CFBundleIdentifier</key>
<string>com.electron.ollama</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>Ollama</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>0.0.0</string>
<key>CFBundleVersion</key>
<string>0.0.0</string>
<key>DTCompiler</key>
<string>com.apple.compilers.llvm.clang.1_0</string>
<key>DTSDKBuild</key>
<string>22E245</string>
<key>DTSDKName</key>
<string>macosx14.0</string>
<key>DTXcode</key>
<string>1431</string>
<key>DTXcodeBuild</key>
<string>14E300c</string>
<key>LSApplicationCategoryType</key>
<string>public.app-category.developer-tools</string>
<key>LSMinimumSystemVersion</key>
<string>14.0</string>
<key>LSUIElement</key>
<true/>
<key>CFBundleURLTypes</key>
<array>
<dict>
<key>CFBundleURLName</key>
<string>Ollama URL</string>
<key>CFBundleURLSchemes</key>
<array>
<string>ollama</string>
</array>
</dict>
</array>
</dict>
</plist>

View File

@@ -1,25 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>com.ollama.ollama</string>
<key>BundleProgram</key>
<string>Contents/Frameworks/Squirrel.framework/Versions/A/Squirrel</string>
<key>ProgramArguments</key>
<array>
<string>Contents/Frameworks/Squirrel.framework/Versions/A/Squirrel</string>
<string>background</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>LimitLoadToSessionType</key>
<string>Aqua</string>
<key>POSIXSpawnType</key>
<string>Interactive</string>
<key>LSUIElement</key>
<true/>
<key>LSBackgroundOnly</key>
<false/>
</dict>
</plist>

View File

Binary file not shown.

View File

Binary file not shown.


View File

Binary file not shown.


View File

Binary file not shown.


View File

Binary file not shown.


View File

Binary file not shown.


View File

Binary file not shown.


View File

Binary file not shown.


View File

Binary file not shown.


View File

@@ -1,15 +0,0 @@
ISC License
Copyright (c) 2018, the dialog authors.
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

View File

@@ -1,43 +0,0 @@
#include <objc/NSObjCRuntime.h>
typedef enum {
MSG_YESNO,
MSG_ERROR,
MSG_INFO,
} AlertStyle;
typedef struct {
char* msg;
char* title;
AlertStyle style;
} AlertDlgParams;
#define LOADDLG 0
#define SAVEDLG 1
#define DIRDLG 2 // browse for directory
typedef struct {
int mode; /* which dialog style to invoke (see earlier defines) */
char* buf; /* buffer to store selected file */
int nbuf; /* number of bytes allocated at buf */
char* title; /* title for dialog box (can be nil) */
void** exts; /* list of valid extensions (elements actual type is NSString*) */
int numext; /* number of items in exts */
int relaxext; /* allow other extensions? */
char* startDir; /* directory to start in (can be nil) */
char* filename; /* default filename for dialog box (can be nil) */
int showHidden; /* show hidden files? */
int allowMultiple; /* allow multiple file selection? */
} FileDlgParams;
typedef enum {
DLG_OK,
DLG_CANCEL,
DLG_URLFAIL,
} DlgResult;
DlgResult alertDlg(AlertDlgParams*);
DlgResult fileDlg(FileDlgParams*);
void* NSStr(void* buf, int len);
void NSRelease(void* obj);

View File

@@ -1,208 +0,0 @@
#import <Cocoa/Cocoa.h>
#include "dlg.h"
#include <string.h>
#include <sys/syslimits.h>
// Import UniformTypeIdentifiers for macOS 11+
#if __MAC_OS_X_VERSION_MAX_ALLOWED >= 110000
#import <UniformTypeIdentifiers/UniformTypeIdentifiers.h>
#endif
void* NSStr(void* buf, int len) {
return (void*)[[NSString alloc] initWithBytes:buf length:len encoding:NSUTF8StringEncoding];
}
void checkActivationPolicy() {
NSApplicationActivationPolicy policy = [NSApp activationPolicy];
// With NSApplicationActivationPolicyProhibited, NSApp will not show the panel at all.
// That usually means we are not running inside a GUI app (which would set the policy
// itself) but in a terminal app; switching to the accessory policy allows dialogs to show.
if (policy == NSApplicationActivationPolicyProhibited) {
[NSApp setActivationPolicy:NSApplicationActivationPolicyAccessory];
}
}
void NSRelease(void* obj) {
[(NSObject*)obj release];
}
@interface AlertDlg : NSObject {
AlertDlgParams* params;
DlgResult result;
}
+ (AlertDlg*)init:(AlertDlgParams*)params;
- (DlgResult)run;
@end
DlgResult alertDlg(AlertDlgParams* params) {
return [[AlertDlg init:params] run];
}
@implementation AlertDlg
+ (AlertDlg*)init:(AlertDlgParams*)params {
AlertDlg* d = [AlertDlg alloc];
d->params = params;
return d;
}
- (DlgResult)run {
if(![NSThread isMainThread]) {
[self performSelectorOnMainThread:@selector(run) withObject:nil waitUntilDone:YES];
return self->result;
}
NSAlert* alert = [[NSAlert alloc] init];
if(self->params->title != nil) {
[[alert window] setTitle:[[NSString alloc] initWithUTF8String:self->params->title]];
}
[alert setMessageText:[[NSString alloc] initWithUTF8String:self->params->msg]];
switch (self->params->style) {
case MSG_YESNO:
[alert addButtonWithTitle:@"Yes"];
[alert addButtonWithTitle:@"No"];
break;
case MSG_ERROR:
[alert setIcon:[NSImage imageNamed:NSImageNameCaution]];
[alert addButtonWithTitle:@"OK"];
break;
case MSG_INFO:
[alert setIcon:[NSImage imageNamed:NSImageNameInfo]];
[alert addButtonWithTitle:@"OK"];
break;
}
checkActivationPolicy();
self->result = [alert runModal] == NSAlertFirstButtonReturn ? DLG_OK : DLG_CANCEL;
return self->result;
}
@end
@interface FileDlg : NSObject {
FileDlgParams* params;
DlgResult result;
}
+ (FileDlg*)init:(FileDlgParams*)params;
- (DlgResult)run;
@end
DlgResult fileDlg(FileDlgParams* params) {
return [[FileDlg init:params] run];
}
@implementation FileDlg
+ (FileDlg*)init:(FileDlgParams*)params {
FileDlg* d = [FileDlg alloc];
d->params = params;
return d;
}
- (DlgResult)run {
if(![NSThread isMainThread]) {
[self performSelectorOnMainThread:@selector(run) withObject:nil waitUntilDone:YES];
} else if(self->params->mode == SAVEDLG) {
self->result = [self save];
} else {
self->result = [self load];
}
return self->result;
}
- (NSInteger)runPanel:(NSSavePanel*)panel {
[panel setFloatingPanel:YES];
[panel setShowsHiddenFiles:self->params->showHidden ? YES : NO];
[panel setCanCreateDirectories:YES];
if(self->params->title != nil) {
[panel setTitle:[[NSString alloc] initWithUTF8String:self->params->title]];
}
// Use modern allowedContentTypes API for better file type support (especially video files)
if(self->params->numext > 0) {
NSMutableArray *utTypes = [NSMutableArray arrayWithCapacity:self->params->numext];
NSString** exts = (NSString**)self->params->exts;
for(int i = 0; i < self->params->numext; i++) {
UTType *type = [UTType typeWithFilenameExtension:exts[i]];
if(type) {
[utTypes addObject:type];
}
}
if([utTypes count] > 0) {
[panel setAllowedContentTypes:utTypes];
}
}
if(self->params->relaxext) {
[panel setAllowsOtherFileTypes:YES];
}
if(self->params->startDir) {
[panel setDirectoryURL:[NSURL URLWithString:[[NSString alloc] initWithUTF8String:self->params->startDir]]];
}
if(self->params->filename != nil) {
[panel setNameFieldStringValue:[[NSString alloc] initWithUTF8String:self->params->filename]];
}
checkActivationPolicy();
return [panel runModal];
}
- (DlgResult)save {
NSSavePanel* panel = [NSSavePanel savePanel];
if(![self runPanel:panel]) {
return DLG_CANCEL;
} else if(![[panel URL] getFileSystemRepresentation:self->params->buf maxLength:self->params->nbuf]) {
return DLG_URLFAIL;
}
return DLG_OK;
}
- (DlgResult)load {
NSOpenPanel* panel = [NSOpenPanel openPanel];
if(self->params->mode == DIRDLG) {
[panel setCanChooseDirectories:YES];
[panel setCanChooseFiles:NO];
}
if(self->params->allowMultiple) {
[panel setAllowsMultipleSelection:YES];
}
if(![self runPanel:panel]) {
return DLG_CANCEL;
}
NSArray* urls = [panel URLs];
if(self->params->allowMultiple && [urls count] >= 1) {
// For multiple files, we need to return all paths separated by null bytes
char* bufPtr = self->params->buf;
int remainingBuf = self->params->nbuf;
// Calculate total required buffer size first
int totalSize = 0;
for(NSURL* url in urls) {
char tempBuf[PATH_MAX];
if(![url getFileSystemRepresentation:tempBuf maxLength:PATH_MAX]) {
return DLG_URLFAIL;
}
totalSize += strlen(tempBuf) + 1; // +1 for null terminator
}
totalSize += 1; // Final null terminator
if(totalSize > self->params->nbuf) {
// Not enough buffer space
return DLG_URLFAIL;
}
// Now actually copy the paths (we know we have space)
bufPtr = self->params->buf;
for(NSURL* url in urls) {
char tempBuf[PATH_MAX];
[url getFileSystemRepresentation:tempBuf maxLength:PATH_MAX];
int pathLen = strlen(tempBuf);
strcpy(bufPtr, tempBuf);
bufPtr += pathLen + 1;
}
*bufPtr = '\0'; // Final null terminator
}
return DLG_OK;
}
@end

View File

@@ -1,183 +0,0 @@
package cocoa
// #cgo darwin LDFLAGS: -framework Cocoa -framework UniformTypeIdentifiers
// #include <stdlib.h>
// #include <sys/syslimits.h>
// #include "dlg.h"
import "C"
import (
"bytes"
"errors"
"unsafe"
)
type AlertParams struct {
p C.AlertDlgParams
}
func mkAlertParams(msg, title string, style C.AlertStyle) *AlertParams {
a := AlertParams{C.AlertDlgParams{msg: C.CString(msg), style: style}}
if title != "" {
a.p.title = C.CString(title)
}
return &a
}
func (a *AlertParams) run() C.DlgResult {
return C.alertDlg(&a.p)
}
func (a *AlertParams) free() {
C.free(unsafe.Pointer(a.p.msg))
if a.p.title != nil {
C.free(unsafe.Pointer(a.p.title))
}
}
func nsStr(s string) unsafe.Pointer {
// Guard against empty strings, where &[]byte(s)[0] would panic.
b := append([]byte(s), 0)
return C.NSStr(unsafe.Pointer(&b[0]), C.int(len(s)))
}
func YesNoDlg(msg, title string) bool {
a := mkAlertParams(msg, title, C.MSG_YESNO)
defer a.free()
return a.run() == C.DLG_OK
}
func InfoDlg(msg, title string) {
a := mkAlertParams(msg, title, C.MSG_INFO)
defer a.free()
a.run()
}
func ErrorDlg(msg, title string) {
a := mkAlertParams(msg, title, C.MSG_ERROR)
defer a.free()
a.run()
}
const (
BUFSIZE = C.PATH_MAX
MULTI_FILE_BUF_SIZE = 32768
)
// MultiFileDlg opens a file dialog that allows multiple file selection
func MultiFileDlg(title string, exts []string, relaxExt bool, startDir string, showHidden bool) ([]string, error) {
return fileDlgWithOptions(C.LOADDLG, title, exts, relaxExt, startDir, "", showHidden, true)
}
// FileDlg opens a file dialog for single file selection (kept for compatibility)
func FileDlg(save bool, title string, exts []string, relaxExt bool, startDir string, filename string, showHidden bool) (string, error) {
mode := C.LOADDLG
if save {
mode = C.SAVEDLG
}
files, err := fileDlgWithOptions(mode, title, exts, relaxExt, startDir, filename, showHidden, false)
if err != nil {
return "", err
}
if len(files) == 0 {
return "", nil
}
return files[0], nil
}
func DirDlg(title string, startDir string, showHidden bool) (string, error) {
files, err := fileDlgWithOptions(C.DIRDLG, title, nil, false, startDir, "", showHidden, false)
if err != nil {
return "", err
}
if len(files) == 0 {
return "", nil
}
return files[0], nil
}
// fileDlgWithOptions is the unified file dialog function that handles both single and multiple selection
func fileDlgWithOptions(mode int, title string, exts []string, relaxExt bool, startDir, filename string, showHidden, allowMultiple bool) ([]string, error) {
// Use larger buffer for multiple files, smaller for single
bufSize := BUFSIZE
if allowMultiple {
bufSize = MULTI_FILE_BUF_SIZE
}
p := C.FileDlgParams{
mode: C.int(mode),
nbuf: C.int(bufSize),
}
if allowMultiple {
p.allowMultiple = C.int(1) // Enable multiple selection //nolint:structcheck
}
if showHidden {
p.showHidden = 1
}
p.buf = (*C.char)(C.malloc(C.size_t(bufSize)))
defer C.free(unsafe.Pointer(p.buf))
buf := (*(*[MULTI_FILE_BUF_SIZE]byte)(unsafe.Pointer(p.buf)))[:bufSize]
if title != "" {
p.title = C.CString(title)
defer C.free(unsafe.Pointer(p.title))
}
if startDir != "" {
p.startDir = C.CString(startDir)
defer C.free(unsafe.Pointer(p.startDir))
}
if filename != "" {
p.filename = C.CString(filename)
defer C.free(unsafe.Pointer(p.filename))
}
if len(exts) > 0 {
if len(exts) > 999 {
panic("more than 999 extensions not supported")
}
ptrSize := int(unsafe.Sizeof(&title))
p.exts = (*unsafe.Pointer)(C.malloc(C.size_t(ptrSize * len(exts))))
defer C.free(unsafe.Pointer(p.exts))
cext := (*(*[999]unsafe.Pointer)(unsafe.Pointer(p.exts)))[:]
for i, ext := range exts {
cext[i] = nsStr(ext)
defer C.NSRelease(cext[i])
}
p.numext = C.int(len(exts))
if relaxExt {
p.relaxext = 1
}
}
// Execute dialog and parse results
switch C.fileDlg(&p) {
case C.DLG_OK:
if allowMultiple {
// Parse multiple null-terminated strings from buffer
var files []string
start := 0
for i := range len(buf) - 1 {
if buf[i] == 0 {
if i > start {
files = append(files, string(buf[start:i]))
}
start = i + 1
// Check for double null (end of list)
if i+1 < len(buf) && buf[i+1] == 0 {
break
}
}
}
return files, nil
} else {
// Single file - return as array for consistency
filename := string(buf[:bytes.Index(buf, []byte{0})])
return []string{filename}, nil
}
case C.DLG_CANCEL:
return nil, nil
case C.DLG_URLFAIL:
return nil, errors.New("failed to get file-system representation for selected URL")
}
panic("unhandled case")
}
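The buffer contract between the Cocoa panel code and the parsing loop above is: full paths separated by single NUL bytes, with a double NUL marking the end of the list. A self-contained sketch of decoding that layout; the buffer contents are made up for the example:

	package main

	import (
		"bytes"
		"fmt"
	)

	// splitNULSeparated parses single-NUL-separated paths terminated by a
	// double NUL, the layout the Cocoa dialog writes for multiple selection.
	func splitNULSeparated(buf []byte) []string {
		var paths []string
		for len(buf) > 0 {
			i := bytes.IndexByte(buf, 0)
			if i <= 0 {
				break // empty segment: the terminating double NUL
			}
			paths = append(paths, string(buf[:i]))
			buf = buf[i+1:]
		}
		return paths
	}

	func main() {
		buf := []byte("/tmp/a.txt\x00/tmp/b.txt\x00\x00")
		fmt.Println(splitNULSeparated(buf)) // [/tmp/a.txt /tmp/b.txt]
	}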

View File

@@ -1,182 +0,0 @@
//go:build windows || darwin
// Package dialog provides a simple cross-platform common dialog API.
// Eg. to prompt the user with a yes/no dialog:
//
// if dialog.MsgDlg("%s", "Do you want to continue?").YesNo() {
// // user pressed Yes
// }
//
// The general usage pattern is to call one of the toplevel *Dlg functions
// which return a *Builder structure. From here you can optionally call
// configuration functions (eg. Title) to customise the dialog, before
// using a launcher function to run the dialog.
package dialog
import (
"errors"
"fmt"
)
// ErrCancelled is an error returned when a user cancels/closes a dialog.
var ErrCancelled = errors.New("Cancelled")
// Cancelled refers to ErrCancelled.
// Deprecated: Use ErrCancelled instead.
var Cancelled = ErrCancelled
// Dlg is the common type for dialogs.
type Dlg struct {
Title string
}
// MsgBuilder is used for creating message boxes.
type MsgBuilder struct {
Dlg
Msg string
}
// Message initialises a MsgBuilder with the provided message.
func Message(format string, args ...interface{}) *MsgBuilder {
return &MsgBuilder{Msg: fmt.Sprintf(format, args...)}
}
// Title specifies what the title of the message dialog will be.
func (b *MsgBuilder) Title(title string) *MsgBuilder {
b.Dlg.Title = title
return b
}
// YesNo spawns the message dialog with two buttons, "Yes" and "No".
// Returns true iff the user selected "Yes".
func (b *MsgBuilder) YesNo() bool {
return b.yesNo()
}
// Info spawns the message dialog with an information icon and single button, "Ok".
func (b *MsgBuilder) Info() {
b.info()
}
// Error spawns the message dialog with an error icon and single button, "Ok".
func (b *MsgBuilder) Error() {
b.error()
}
// FileFilter represents a category of files (eg. audio files, spreadsheets).
type FileFilter struct {
Desc string
Extensions []string
}
// FileBuilder is used for creating file browsing dialogs.
type FileBuilder struct {
Dlg
StartDir string
StartFile string
Filters []FileFilter
ShowHiddenFiles bool
}
// File initialises a FileBuilder using the default configuration.
func File() *FileBuilder {
return &FileBuilder{}
}
// Title specifies the title to be used for the dialog.
func (b *FileBuilder) Title(title string) *FileBuilder {
b.Dlg.Title = title
return b
}
// Filter adds a category of files to the types allowed by the dialog. Multiple
// calls to Filter are cumulative - any of the provided categories will be allowed.
// By default all files can be selected.
//
// The special extension '*' allows all files to be selected when the Filter is active.
func (b *FileBuilder) Filter(desc string, extensions ...string) *FileBuilder {
filt := FileFilter{desc, extensions}
if len(filt.Extensions) == 0 {
filt.Extensions = append(filt.Extensions, "*")
}
b.Filters = append(b.Filters, filt)
return b
}
// SetStartDir specifies the initial directory of the dialog.
func (b *FileBuilder) SetStartDir(startDir string) *FileBuilder {
b.StartDir = startDir
return b
}
// SetStartFile specifies the initial file name of the dialog.
func (b *FileBuilder) SetStartFile(startFile string) *FileBuilder {
b.StartFile = startFile
return b
}
// ShowHidden sets whether hidden files should be visible in the dialog.
func (b *FileBuilder) ShowHidden(show bool) *FileBuilder {
b.ShowHiddenFiles = show
return b
}
// Load spawns the file selection dialog using the configured settings,
// asking the user to select a single file. Returns ErrCancelled as the error
// if the user cancels or closes the dialog.
func (b *FileBuilder) Load() (string, error) {
return b.load()
}
// LoadMultiple spawns the file selection dialog using the configured settings,
// asking the user to select multiple files. Returns ErrCancelled as the error
// if the user cancels or closes the dialog.
func (b *FileBuilder) LoadMultiple() ([]string, error) {
return b.loadMultiple()
}
// Save spawns the file selection dialog using the configured settings,
// asking the user for a filename to save as. If the chosen file exists, the
// user is prompted whether they want to overwrite the file. Returns
// ErrCancelled as the error if the user cancels/closes the dialog, or selects
// not to overwrite the file.
func (b *FileBuilder) Save() (string, error) {
return b.save()
}
// DirectoryBuilder is used for directory browse dialogs.
type DirectoryBuilder struct {
Dlg
StartDir string
ShowHiddenFiles bool
}
// Directory initialises a DirectoryBuilder using the default configuration.
func Directory() *DirectoryBuilder {
return &DirectoryBuilder{}
}
// Browse spawns the directory selection dialog using the configured settings,
// asking the user to select a single folder. Returns ErrCancelled as the error
// if the user cancels or closes the dialog.
func (b *DirectoryBuilder) Browse() (string, error) {
return b.browse()
}
// Title specifies the title to be used for the dialog.
func (b *DirectoryBuilder) Title(title string) *DirectoryBuilder {
b.Dlg.Title = title
return b
}
// SetStartDir specifies the initial directory to be used for the dialog.
func (b *DirectoryBuilder) SetStartDir(dir string) *DirectoryBuilder {
b.StartDir = dir
return b
}
// ShowHidden sets whether hidden files should be visible in the dialog.
func (b *DirectoryBuilder) ShowHidden(show bool) *DirectoryBuilder {
b.ShowHiddenFiles = show
return b
}
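To make the builder pattern described in the package comment concrete, a hedged usage sketch; the title, filter, and result handling are illustrative rather than taken from the app:

	package main

	import (
		"errors"
		"fmt"

		"github.com/ollama/ollama/app/dialog"
	)

	func main() {
		// Configure, then launch: toplevel constructor -> builder calls -> launcher.
		filename, err := dialog.File().
			Title("Open a document").
			Filter("Text Files", "txt", "md").
			Load()
		switch {
		case errors.Is(err, dialog.ErrCancelled):
			fmt.Println("dialog cancelled")
		case err != nil:
			fmt.Println("dialog error:", err)
		default:
			fmt.Println("selected:", filename)
		}
	}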

View File

@@ -1,82 +0,0 @@
package dialog
import (
"github.com/ollama/ollama/app/dialog/cocoa"
)
func (b *MsgBuilder) yesNo() bool {
return cocoa.YesNoDlg(b.Msg, b.Dlg.Title)
}
func (b *MsgBuilder) info() {
cocoa.InfoDlg(b.Msg, b.Dlg.Title)
}
func (b *MsgBuilder) error() {
cocoa.ErrorDlg(b.Msg, b.Dlg.Title)
}
func (b *FileBuilder) load() (string, error) {
return b.run(false)
}
func (b *FileBuilder) loadMultiple() ([]string, error) {
return b.runMultiple()
}
func (b *FileBuilder) save() (string, error) {
return b.run(true)
}
func (b *FileBuilder) run(save bool) (string, error) {
star := false
var exts []string
for _, filt := range b.Filters {
for _, ext := range filt.Extensions {
if ext == "*" {
star = true
} else {
exts = append(exts, ext)
}
}
}
if star && save {
/* macOS doesn't allow the user to switch visible file types/extensions. Also
** NSSavePanel's allowsOtherFileTypes property has no effect for an open
** dialog, so if "*" is a possible extension we must always show all files. */
exts = nil
}
f, err := cocoa.FileDlg(save, b.Dlg.Title, exts, star, b.StartDir, b.StartFile, b.ShowHiddenFiles)
if f == "" && err == nil {
return "", ErrCancelled
}
return f, err
}
func (b *FileBuilder) runMultiple() ([]string, error) {
star := false
var exts []string
for _, filt := range b.Filters {
for _, ext := range filt.Extensions {
if ext == "*" {
star = true
} else {
exts = append(exts, ext)
}
}
}
files, err := cocoa.MultiFileDlg(b.Dlg.Title, exts, star, b.StartDir, b.ShowHiddenFiles)
if len(files) == 0 && err == nil {
return nil, ErrCancelled
}
return files, err
}
func (b *DirectoryBuilder) browse() (string, error) {
f, err := cocoa.DirDlg(b.Dlg.Title, b.StartDir, b.ShowHiddenFiles)
if f == "" && err == nil {
return "", ErrCancelled
}
return f, err
}

View File

@@ -1,241 +0,0 @@
package dialog
import (
"fmt"
"reflect"
"syscall"
"unicode/utf16"
"unsafe"
"github.com/TheTitanrain/w32"
)
const multiFileBufferSize = w32.MAX_PATH * 10
type WinDlgError int
func (e WinDlgError) Error() string {
return fmt.Sprintf("CommDlgExtendedError: %#x", e)
}
func err() error {
e := w32.CommDlgExtendedError()
if e == 0 {
return ErrCancelled
}
return WinDlgError(e)
}
func (b *MsgBuilder) yesNo() bool {
r := w32.MessageBox(w32.HWND(0), b.Msg, firstOf(b.Dlg.Title, "Confirm?"), w32.MB_YESNO)
return r == w32.IDYES
}
func (b *MsgBuilder) info() {
w32.MessageBox(w32.HWND(0), b.Msg, firstOf(b.Dlg.Title, "Information"), w32.MB_OK|w32.MB_ICONINFORMATION)
}
func (b *MsgBuilder) error() {
w32.MessageBox(w32.HWND(0), b.Msg, firstOf(b.Dlg.Title, "Error"), w32.MB_OK|w32.MB_ICONERROR)
}
type filedlg struct {
buf []uint16
filters []uint16
opf *w32.OPENFILENAME
}
func (d filedlg) Filename() string {
i := 0
for i < len(d.buf) && d.buf[i] != 0 {
i++
}
return string(utf16.Decode(d.buf[:i]))
}
func (d filedlg) parseMultipleFilenames() []string {
var files []string
i := 0
// Find first null terminator (directory path)
for i < len(d.buf) && d.buf[i] != 0 {
i++
}
if i >= len(d.buf) {
return files
}
// Get directory path
dirPath := string(utf16.Decode(d.buf[:i]))
i++ // Skip null terminator
// Check if there are more files (multiple selection)
if i < len(d.buf) && d.buf[i] != 0 {
// Multiple files selected - parse filenames
for i < len(d.buf) {
start := i
// Find next null terminator
for i < len(d.buf) && d.buf[i] != 0 {
i++
}
if i >= len(d.buf) {
break
}
if start < i {
filename := string(utf16.Decode(d.buf[start:i]))
if dirPath != "" {
files = append(files, dirPath+"\\"+filename)
} else {
files = append(files, filename)
}
}
i++ // Skip null terminator
if i >= len(d.buf) || d.buf[i] == 0 {
break // End of list
}
}
} else {
// Single file selected
files = append(files, dirPath)
}
return files
}
func (b *FileBuilder) load() (string, error) {
d := openfile(w32.OFN_FILEMUSTEXIST|w32.OFN_NOCHANGEDIR, b)
if w32.GetOpenFileName(d.opf) {
return d.Filename(), nil
}
return "", err()
}
func (b *FileBuilder) loadMultiple() ([]string, error) {
d := openfile(w32.OFN_FILEMUSTEXIST|w32.OFN_NOCHANGEDIR|w32.OFN_ALLOWMULTISELECT|w32.OFN_EXPLORER, b)
d.buf = make([]uint16, multiFileBufferSize)
d.opf.File = utf16ptr(d.buf)
d.opf.MaxFile = uint32(len(d.buf))
if w32.GetOpenFileName(d.opf) {
return d.parseMultipleFilenames(), nil
}
return nil, err()
}
func (b *FileBuilder) save() (string, error) {
d := openfile(w32.OFN_OVERWRITEPROMPT|w32.OFN_NOCHANGEDIR, b)
if w32.GetSaveFileName(d.opf) {
return d.Filename(), nil
}
return "", err()
}
/* syscall.UTF16PtrFromString not sufficient because we need to encode embedded NUL bytes */
func utf16ptr(utf16 []uint16) *uint16 {
if utf16[len(utf16)-1] != 0 {
panic("refusing to make ptr to non-NUL terminated utf16 slice")
}
h := (*reflect.SliceHeader)(unsafe.Pointer(&utf16))
return (*uint16)(unsafe.Pointer(h.Data))
}
func utf16slice(ptr *uint16) []uint16 { //nolint:unused
// Walk the NUL-terminated UTF-16 string to find its length before
// building the slice header, so the header bounds are correct.
i := 0
for *(*uint16)(unsafe.Pointer(uintptr(unsafe.Pointer(ptr)) + uintptr(i)*unsafe.Sizeof(*ptr))) != 0 {
i++
}
hdr := reflect.SliceHeader{Data: uintptr(unsafe.Pointer(ptr)), Len: i, Cap: i}
return *((*[]uint16)(unsafe.Pointer(&hdr))) //nolint:govet
}
func openfile(flags uint32, b *FileBuilder) (d filedlg) {
d.buf = make([]uint16, w32.MAX_PATH)
if b.StartFile != "" {
initialName, _ := syscall.UTF16FromString(b.StartFile)
for i := 0; i < len(initialName) && i < w32.MAX_PATH; i++ {
d.buf[i] = initialName[i]
}
}
d.opf = &w32.OPENFILENAME{
File: utf16ptr(d.buf),
MaxFile: uint32(len(d.buf)),
Flags: flags,
}
d.opf.StructSize = uint32(unsafe.Sizeof(*d.opf))
if b.StartDir != "" {
d.opf.InitialDir, _ = syscall.UTF16PtrFromString(b.StartDir)
}
if b.Dlg.Title != "" {
d.opf.Title, _ = syscall.UTF16PtrFromString(b.Dlg.Title)
}
for _, filt := range b.Filters {
/* build utf16 string of form "Music File\0*.mp3;*.ogg;*.wav;\0" */
d.filters = append(d.filters, utf16.Encode([]rune(filt.Desc))...)
d.filters = append(d.filters, 0)
for _, ext := range filt.Extensions {
s := fmt.Sprintf("*.%s;", ext)
d.filters = append(d.filters, utf16.Encode([]rune(s))...)
}
d.filters = append(d.filters, 0)
}
if d.filters != nil {
d.filters = append(d.filters, 0, 0) // two extra NUL chars to terminate the list
d.opf.Filter = utf16ptr(d.filters)
}
return d
}
type dirdlg struct {
bi *w32.BROWSEINFO
}
const (
bffm_INITIALIZED = 1
bffm_SELCHANGED = 2
bffm_VALIDATEFAILEDA = 3
bffm_VALIDATEFAILEDW = 4
bffm_SETSTATUSTEXTA = (w32.WM_USER + 100)
bffm_SETSTATUSTEXTW = (w32.WM_USER + 104)
bffm_ENABLEOK = (w32.WM_USER + 101)
bffm_SETSELECTIONA = (w32.WM_USER + 102)
bffm_SETSELECTIONW = (w32.WM_USER + 103)
bffm_SETOKTEXT = (w32.WM_USER + 105)
bffm_SETEXPANDED = (w32.WM_USER + 106)
bffm_SETSTATUSTEXT = bffm_SETSTATUSTEXTW
bffm_SETSELECTION = bffm_SETSELECTIONW
bffm_VALIDATEFAILED = bffm_VALIDATEFAILEDW
)
func callbackDefaultDir(hwnd w32.HWND, msg uint, lParam, lpData uintptr) int {
if msg == bffm_INITIALIZED {
_ = w32.SendMessage(hwnd, bffm_SETSELECTION, w32.TRUE, lpData)
}
return 0
}
func selectdir(b *DirectoryBuilder) (d dirdlg) {
d.bi = &w32.BROWSEINFO{Flags: w32.BIF_RETURNONLYFSDIRS | w32.BIF_NEWDIALOGSTYLE}
if b.Dlg.Title != "" {
d.bi.Title, _ = syscall.UTF16PtrFromString(b.Dlg.Title)
}
if b.StartDir != "" {
s16, _ := syscall.UTF16PtrFromString(b.StartDir)
d.bi.LParam = uintptr(unsafe.Pointer(s16))
d.bi.CallbackFunc = syscall.NewCallback(callbackDefaultDir)
}
return d
}
func (b *DirectoryBuilder) browse() (string, error) {
d := selectdir(b)
res := w32.SHBrowseForFolder(d.bi)
if res == 0 {
return "", ErrCancelled
}
return w32.SHGetPathFromIDList(res), nil
}
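For reference, the OPENFILENAME multi-select layout that parseMultipleFilenames walks is: directory, NUL, filename, NUL, ..., double NUL, while a single selection is just one full path. A synthetic decoding sketch with no Win32 calls; the buffer contents below are made up:

	package main

	import (
		"fmt"
		"unicode/utf16"
	)

	// decodeOFNMulti decodes the OFN_ALLOWMULTISELECT|OFN_EXPLORER buffer:
	// dir NUL name NUL ... NUL NUL, or a single full path for one file.
	func decodeOFNMulti(buf []uint16) []string {
		var parts []string
		start := 0
		for i := 0; i < len(buf); i++ {
			if buf[i] == 0 {
				if i == start {
					break // double NUL: end of list
				}
				parts = append(parts, string(utf16.Decode(buf[start:i])))
				start = i + 1
			}
		}
		if len(parts) <= 1 {
			return parts // single selection is already a full path
		}
		files := make([]string, 0, len(parts)-1)
		for _, name := range parts[1:] {
			files = append(files, parts[0]+`\`+name)
		}
		return files
	}

	func main() {
		var buf []uint16
		for _, s := range []string{`C:\tmp`, "a.txt", "b.txt"} {
			buf = append(buf, utf16.Encode([]rune(s))...)
			buf = append(buf, 0)
		}
		buf = append(buf, 0) // double NUL terminator
		fmt.Println(decodeOFNMulti(buf)) // [C:\tmp\a.txt C:\tmp\b.txt]
	}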

View File

@@ -1,12 +0,0 @@
//go:build windows
package dialog
func firstOf(args ...string) string {
for _, arg := range args {
if arg != "" {
return arg
}
}
return ""
}

80
app/forge.config.ts Normal file
View File

@@ -0,0 +1,80 @@
import type { ForgeConfig, ResolvedForgeConfig, ForgeMakeResult } from '@electron-forge/shared-types'
import { MakerSquirrel } from '@electron-forge/maker-squirrel'
import { MakerZIP } from '@electron-forge/maker-zip'
import { PublisherGithub } from '@electron-forge/publisher-github'
import { AutoUnpackNativesPlugin } from '@electron-forge/plugin-auto-unpack-natives'
import { WebpackPlugin } from '@electron-forge/plugin-webpack'
import * as path from 'path'
import * as fs from 'fs'
import { mainConfig } from './webpack.main.config'
import { rendererConfig } from './webpack.renderer.config'
const packageJson = JSON.parse(fs.readFileSync(path.resolve(__dirname, './package.json'), 'utf8'))
const config: ForgeConfig = {
packagerConfig: {
appVersion: process.env.VERSION || packageJson.version,
asar: true,
icon: './assets/icon.icns',
extraResource: [
'../ollama',
path.join(__dirname, './assets/ollama_icon_16x16Template.png'),
path.join(__dirname, './assets/ollama_icon_16x16Template@2x.png'),
...(process.platform === 'darwin' ? ['../llama/ggml-metal.metal'] : []),
],
...(process.env.SIGN
? {
osxSign: {
identity: process.env.APPLE_IDENTITY,
},
osxNotarize: {
tool: 'notarytool',
appleId: process.env.APPLE_ID || '',
appleIdPassword: process.env.APPLE_PASSWORD || '',
teamId: process.env.APPLE_TEAM_ID || '',
},
}
: {}),
},
rebuildConfig: {},
makers: [new MakerSquirrel({}), new MakerZIP({}, ['darwin'])],
publishers: [
new PublisherGithub({
repository: {
name: 'ollama',
owner: 'jmorganca',
},
draft: false,
prerelease: true,
}),
],
hooks: {
readPackageJson: async (_, packageJson) => {
return { ...packageJson, version: process.env.VERSION || packageJson.version }
},
},
plugins: [
new AutoUnpackNativesPlugin({}),
new WebpackPlugin({
mainConfig,
devContentSecurityPolicy: `default-src * 'unsafe-eval' 'unsafe-inline'; img-src data: 'self'`,
renderer: {
config: rendererConfig,
nodeIntegration: true,
entryPoints: [
{
html: './src/index.html',
js: './src/renderer.tsx',
name: 'main_window',
preload: {
js: './src/preload.ts',
},
},
],
},
}),
],
}
export default config

View File

@@ -1,30 +0,0 @@
//go:build windows || darwin
package format
import (
"strings"
"unicode"
)
// KebabCase converts a string from camelCase or PascalCase to kebab-case.
// (e.g. "camelCase" -> "camel-case")
func KebabCase(str string) string {
var result strings.Builder
for i, char := range str {
if i > 0 {
prevChar := rune(str[i-1])
// Add hyphen before uppercase letters
if unicode.IsUpper(char) &&
(unicode.IsLower(prevChar) || unicode.IsDigit(prevChar) ||
(i < len(str)-1 && unicode.IsLower(rune(str[i+1])))) {
result.WriteRune('-')
}
}
result.WriteRune(unicode.ToLower(char))
}
return result.String()
}

View File

@@ -1,34 +0,0 @@
//go:build windows || darwin
package format
import "testing"
func TestKebabCase(t *testing.T) {
tests := []struct {
input string
expected string
}{
{"already-kebab-case", "already-kebab-case"},
{"simpleCamelCase", "simple-camel-case"},
{"PascalCase", "pascal-case"},
{"camelCaseWithNumber123", "camel-case-with-number123"},
{"APIResponse", "api-response"},
{"mixedCASE", "mixed-case"},
{"WithACRONYMS", "with-acronyms"},
{"ALLCAPS", "allcaps"},
{"camelCaseWITHMixedACRONYMS", "camel-case-with-mixed-acronyms"},
{"numbers123in456string", "numbers123in456string"},
{"5", "5"},
{"S", "s"},
}
for _, tt := range tests {
t.Run(tt.input, func(t *testing.T) {
result := KebabCase(tt.input)
if result != tt.expected {
t.Errorf("toKebabCase(%q) = %q, want %q", tt.input, result, tt.expected)
}
})
}
}

View File

@@ -1,45 +0,0 @@
//go:build windows || darwin
// Package logrotate provides utilities for rotating logs.
// TODO (jmorgan): this most likely doesn't need its own
// package and can be moved to app where log files are created
package logrotate
import (
"log/slog"
"os"
"strconv"
"strings"
)
const MaxLogFiles = 5
func Rotate(filename string) {
if _, err := os.Stat(filename); os.IsNotExist(err) {
return
}
index := strings.LastIndex(filename, ".")
if index < 0 {
// No extension: rotate the whole name (e.g. "app" -> "app-1").
index = len(filename)
}
pre := filename[:index]
post := filename[index:]
for i := MaxLogFiles; i > 0; i-- {
older := pre + "-" + strconv.Itoa(i) + post
newer := pre + "-" + strconv.Itoa(i-1) + post
if i == 1 {
newer = pre + post
}
if _, err := os.Stat(newer); err == nil {
if _, err := os.Stat(older); err == nil {
err := os.Remove(older)
if err != nil {
slog.Warn("Failed to remove older log", "older", older, "error", err)
continue
}
}
err := os.Rename(newer, older)
if err != nil {
slog.Warn("Failed to rotate log", "older", older, "newer", newer, "error", err)
}
}
}
}
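A hedged sketch of how Rotate is meant to be called: once at startup, before reopening the log file. The log path and the import path below are assumptions based on the layout above, not taken from the app:

	package main

	import (
		"log/slog"
		"os"
		"path/filepath"

		"github.com/ollama/ollama/app/logrotate" // assumed import path
	)

	func main() {
		logFile := filepath.Join(os.TempDir(), "ollama-app.log")
		// Shift ollama-app.log -> ollama-app-1.log -> ... keeping at most
		// MaxLogFiles older generations, then open a fresh file.
		logrotate.Rotate(logFile)
		f, err := os.OpenFile(logFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0o644)
		if err != nil {
			slog.Error("failed to open log file", "error", err)
			return
		}
		defer f.Close()
		slog.SetDefault(slog.New(slog.NewTextHandler(f, nil)))
		slog.Info("started with rotated logs")
	}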

View File

@@ -1,70 +0,0 @@
//go:build windows || darwin
package logrotate
import (
"os"
"path/filepath"
"strconv"
"testing"
)
func TestRotate(t *testing.T) {
logDir := t.TempDir()
logFile := filepath.Join(logDir, "testlog.log")
// No log exists
Rotate(logFile)
if err := os.WriteFile(logFile, []byte("1"), 0o644); err != nil {
t.Fatal(err)
}
if _, err := os.Stat(logFile); os.IsNotExist(err) {
t.Fatal("expected log file to exist")
}
// First rotation
Rotate(logFile)
if _, err := os.Stat(filepath.Join(logDir, "testlog-1.log")); os.IsNotExist(err) {
t.Fatal("expected rotated log file to exist")
}
if _, err := os.Stat(filepath.Join(logDir, "testlog-2.log")); !os.IsNotExist(err) {
t.Fatal("expected no second rotated log file")
}
if _, err := os.Stat(logFile); !os.IsNotExist(err) {
t.Fatal("expected original log file to be moved")
}
// Should be a no-op without a new log
Rotate(logFile)
if _, err := os.Stat(filepath.Join(logDir, "testlog-1.log")); os.IsNotExist(err) {
t.Fatal("expected rotated log file to still exist")
}
if _, err := os.Stat(filepath.Join(logDir, "testlog-2.log")); !os.IsNotExist(err) {
t.Fatal("expected no second rotated log file")
}
if _, err := os.Stat(logFile); !os.IsNotExist(err) {
t.Fatal("expected no original log file")
}
for i := 2; i <= MaxLogFiles+1; i++ {
if err := os.WriteFile(logFile, []byte(strconv.Itoa(i)), 0o644); err != nil {
t.Fatal(err)
}
if _, err := os.Stat(logFile); os.IsNotExist(err) {
t.Fatal("expected log file to exist")
}
Rotate(logFile)
if _, err := os.Stat(logFile); !os.IsNotExist(err) {
t.Fatal("expected log file to be moved")
}
for j := 1; j < i; j++ {
if _, err := os.Stat(filepath.Join(logDir, "testlog-"+strconv.Itoa(j)+".log")); os.IsNotExist(err) {
t.Fatalf("expected rotated log file %d to exist", j)
}
}
if _, err := os.Stat(filepath.Join(logDir, "testlog-"+strconv.Itoa(i+1)+".log")); !os.IsNotExist(err) {
t.Fatalf("expected no rotated log file %d", i+1)
}
}
}

View File

@@ -1,374 +0,0 @@
; Inno Setup Installer for Ollama
;
; To build the installer use the build script invoked from the top of the source tree
;
; powershell -ExecutionPolicy Bypass -File .\scripts\build_windows.ps1
#define MyAppName "Ollama"
#if GetEnv("PKG_VERSION") != ""
#define MyAppVersion GetEnv("PKG_VERSION")
#else
#define MyAppVersion "0.0.0"
#endif
#define MyAppPublisher "Ollama"
#define MyAppURL "https://ollama.com/"
#define MyAppExeName "ollama app.exe"
#define MyIcon ".\assets\app.ico"
[Setup]
; NOTE: The value of AppId uniquely identifies this application. Do not use the same AppId value in installers for other applications.
; (To generate a new GUID, click Tools | Generate GUID inside the IDE.)
AppId={{44E83376-CE68-45EB-8FC1-393500EB558C}
AppName={#MyAppName}
AppVersion={#MyAppVersion}
VersionInfoVersion={#MyAppVersion}
;AppVerName={#MyAppName} {#MyAppVersion}
AppPublisher={#MyAppPublisher}
AppPublisherURL={#MyAppURL}
AppSupportURL={#MyAppURL}
AppUpdatesURL={#MyAppURL}
ArchitecturesAllowed=x64compatible arm64
ArchitecturesInstallIn64BitMode=x64compatible arm64
DefaultDirName={localappdata}\Programs\{#MyAppName}
DefaultGroupName={#MyAppName}
DisableProgramGroupPage=yes
PrivilegesRequired=lowest
OutputBaseFilename="OllamaSetup"
SetupIconFile={#MyIcon}
UninstallDisplayIcon={uninstallexe}
Compression=lzma2/ultra64
LZMAUseSeparateProcess=yes
LZMANumBlockThreads=8
SolidCompression=yes
WizardStyle=modern
ChangesEnvironment=yes
OutputDir=..\dist\
; Disable logging once everything's battle tested
; Filename will be %TEMP%\Setup Log*.txt
SetupLogging=yes
CloseApplications=no
RestartApplications=no
RestartIfNeededByRun=no
; https://jrsoftware.org/ishelp/index.php?topic=setup_wizardimagefile
WizardSmallImageFile=.\assets\setup.bmp
; Ollama requires Windows 10 22H2 or newer for proper Unicode rendering
; TODO: consider setting this to 10.0.19045
MinVersion=10.0.10240
; First release that supports WinRT UI Composition for win32 apps
; MinVersion=10.0.17134
; First release with XAML Islands - possible UI path forward
; MinVersion=10.0.18362
; quiet...
DisableDirPage=yes
DisableFinishedPage=yes
DisableReadyMemo=yes
DisableReadyPage=yes
DisableStartupPrompt=yes
; TODO - the percentage can't be set below 100, so how do we make it shorter?
; WizardSizePercent=100,80
#if GetEnv("KEY_CONTAINER")
SignTool=MySignTool
SignedUninstaller=yes
#endif
SetupMutex=OllamaSetupMutex
[Languages]
Name: "english"; MessagesFile: "compiler:Default.isl"
[LangOptions]
DialogFontSize=12
[Files]
#if FileExists("..\dist\windows-ollama-app-amd64.exe")
Source: "..\dist\windows-ollama-app-amd64.exe"; DestDir: "{app}"; DestName: "{#MyAppExeName}" ;Check: not IsArm64(); Flags: ignoreversion 64bit; BeforeInstall: TaskKill('{#MyAppExeName}')
Source: "..\dist\windows-amd64\vc_redist.x64.exe"; DestDir: "{tmp}"; Check: not IsArm64() and vc_redist_needed(); Flags: deleteafterinstall
Source: "..\dist\windows-amd64\ollama.exe"; DestDir: "{app}"; Check: not IsArm64(); Flags: ignoreversion 64bit; BeforeInstall: TaskKill('ollama.exe')
Source: "..\dist\windows-amd64\lib\ollama\*"; DestDir: "{app}\lib\ollama\"; Check: not IsArm64(); Flags: ignoreversion 64bit recursesubdirs
#endif
; For local development, rely on binary compatibility at runtime since we can't cross compile
#if FileExists("..\dist\windows-ollama-app-arm64.exe")
Source: "..\dist\windows-ollama-app-arm64.exe"; DestDir: "{app}"; DestName: "{#MyAppExeName}" ;Check: IsArm64(); Flags: ignoreversion 64bit; BeforeInstall: TaskKill('{#MyAppExeName}')
#else
Source: "..\dist\windows-ollama-app-amd64.exe"; DestDir: "{app}"; DestName: "{#MyAppExeName}" ;Check: IsArm64(); Flags: ignoreversion 64bit; BeforeInstall: TaskKill('{#MyAppExeName}')
#endif
#if FileExists("..\dist\windows-arm64\ollama.exe")
Source: "..\dist\windows-arm64\vc_redist.arm64.exe"; DestDir: "{tmp}"; Check: IsArm64() and vc_redist_needed(); Flags: deleteafterinstall
Source: "..\dist\windows-arm64\ollama.exe"; DestDir: "{app}"; Check: IsArm64(); Flags: ignoreversion 64bit; BeforeInstall: TaskKill('ollama.exe')
#endif
Source: ".\assets\app.ico"; DestDir: "{app}"; Flags: ignoreversion
[Icons]
Name: "{group}\{#MyAppName}"; Filename: "{app}\{#MyAppExeName}"; IconFilename: "{app}\app.ico"
Name: "{app}\lib\{#MyAppName}"; Filename: "{app}\{#MyAppExeName}"; IconFilename: "{app}\app.ico"
Name: "{userprograms}\{#MyAppName}"; Filename: "{app}\{#MyAppExeName}"; IconFilename: "{app}\app.ico"
[InstallDelete]
Type: files; Name: "{%LOCALAPPDATA}\Ollama\updates"
[Run]
#if DirExists("..\dist\windows-arm64")
Filename: "{tmp}\vc_redist.arm64.exe"; Parameters: "/install /passive /norestart"; Check: IsArm64() and vc_redist_needed(); StatusMsg: "Installing VC++ Redistributables..."; Flags: waituntilterminated
#endif
#if DirExists("..\dist\windows-amd64")
Filename: "{tmp}\vc_redist.x64.exe"; Parameters: "/install /passive /norestart"; Check: not IsArm64() and vc_redist_needed(); StatusMsg: "Installing VC++ Redistributables..."; Flags: waituntilterminated
#endif
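; Launch the app with {app} prepended to PATH for this invocation only, so it
; can find ollama.exe before the user's updated Path value is picked up.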
Filename: "{cmd}"; Parameters: "/C set PATH={app};%PATH% & ""{app}\{#MyAppExeName}"""; Flags: postinstall nowait runhidden
[UninstallRun]
; Filename: "{cmd}"; Parameters: "/C ""taskkill /im ''{#MyAppExeName}'' /f /t"; Flags: runhidden
; Filename: "{cmd}"; Parameters: "/C ""taskkill /im ollama.exe /f /t"; Flags: runhidden
Filename: "taskkill"; Parameters: "/im ""{#MyAppExeName}"" /f /t"; Flags: runhidden
Filename: "taskkill"; Parameters: "/im ""ollama.exe"" /f /t"; Flags: runhidden
; HACK! need to give the server and app enough time to exit
; TODO - convert this to a Pascal code script so it waits until they're no longer running, then completes
Filename: "{cmd}"; Parameters: "/c timeout 5"; Flags: runhidden
[UninstallDelete]
Type: filesandordirs; Name: "{%TEMP}\ollama*"
Type: filesandordirs; Name: "{%LOCALAPPDATA}\Ollama"
Type: filesandordirs; Name: "{%LOCALAPPDATA}\Programs\Ollama"
Type: filesandordirs; Name: "{%USERPROFILE}\.ollama\history"
Type: filesandordirs; Name: "{userstartup}\{#MyAppName}.lnk"
; NOTE: if the user has a custom OLLAMA_MODELS it will be preserved
[InstallDelete]
Type: filesandordirs; Name: "{%TEMP}\ollama*"
Type: filesandordirs; Name: "{app}\lib\ollama"
[Messages]
WizardReady=Ollama
ReadyLabel1=%nLet's get you up and running with your own large language models.
SetupAppRunningError=Another Ollama installer is running.%n%nPlease cancel or finish the other installer, then click OK to continue with this install, or Cancel to exit.
;FinishedHeadingLabel=Run your first model
;FinishedLabel=%nRun this command in a PowerShell or cmd terminal.%n%n%n ollama run llama3.2
;ClickFinish=%n
[Registry]
Root: HKCU; Subkey: "Environment"; \
ValueType: expandsz; ValueName: "Path"; ValueData: "{olddata};{app}"; \
Check: NeedsAddPath('{app}')
; Register ollama:// URL protocol
Root: HKCU; Subkey: "Software\Classes\ollama"; ValueType: string; ValueName: ""; ValueData: "URL:Ollama Protocol"; Flags: uninsdeletekey
Root: HKCU; Subkey: "Software\Classes\ollama"; ValueType: string; ValueName: "URL Protocol"; ValueData: ""; Flags: uninsdeletekey
Root: HKCU; Subkey: "Software\Classes\ollama\shell\open\command"; ValueType: string; ValueName: ""; ValueData: """{app}\{#MyAppExeName}"" ""%1"""; Flags: uninsdeletekey
[Code]
function NeedsAddPath(Param: string): boolean;
var
OrigPath: string;
begin
if not RegQueryStringValue(HKEY_CURRENT_USER,
'Environment',
'Path', OrigPath)
then begin
Result := True;
exit;
end;
{ look for the path with leading and trailing semicolon }
{ Pos() returns 0 if not found }
Result := Pos(';' + ExpandConstant(Param) + ';', ';' + OrigPath + ';') = 0;
end;
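{ Example: if Param expands to 'C:\Program Files\Ollama' and the user's Path
  is 'C:\Windows;C:\Program Files\Ollama', the semicolon-wrapped search finds
  a match, NeedsAddPath returns False, and the [Registry] entry above is
  skipped. }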
{ --- VC Runtime libraries discovery code - Only install vc_redist if it isn't already installed ----- }
const VCRTL_MIN_V1 = 14;
const VCRTL_MIN_V2 = 40;
const VCRTL_MIN_V3 = 33807;
const VCRTL_MIN_V4 = 0;
// check if the minimum required vc redist is installed (by looking the registry)
function vc_redist_needed (): Boolean;
var
sRegKey: string;
v1: Cardinal;
v2: Cardinal;
v3: Cardinal;
v4: Cardinal;
begin
if (IsArm64()) then begin
sRegKey := 'SOFTWARE\WOW6432Node\Microsoft\VisualStudio\14.0\VC\Runtimes\arm64';
end else begin
sRegKey := 'SOFTWARE\Microsoft\VisualStudio\14.0\VC\Runtimes\x64';
end;
if (RegQueryDWordValue (HKEY_LOCAL_MACHINE, sRegKey, 'Major', v1) and
RegQueryDWordValue (HKEY_LOCAL_MACHINE, sRegKey, 'Minor', v2) and
RegQueryDWordValue (HKEY_LOCAL_MACHINE, sRegKey, 'Bld', v3) and
RegQueryDWordValue (HKEY_LOCAL_MACHINE, sRegKey, 'RBld', v4)) then
begin
Log ('VC Redist version: ' + IntToStr (v1) +
'.' + IntToStr (v2) + '.' + IntToStr (v3) +
'.' + IntToStr (v4));
{ Version info was found. Return true if later or equal to our
minimal required version RTL_MIN_Vx }
Result := not (
(v1 > VCRTL_MIN_V1) or ((v1 = VCRTL_MIN_V1) and
((v2 > VCRTL_MIN_V2) or ((v2 = VCRTL_MIN_V2) and
((v3 > VCRTL_MIN_V3) or ((v3 = VCRTL_MIN_V3) and
(v4 >= VCRTL_MIN_V4)))))));
end
else
Result := TRUE;
end;
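// Example: an installed runtime of 14.42.34433.0 satisfies the minimum
// 14.40.33807.0 (v2 = 42 > 40 decides it), so vc_redist_needed returns
// False and the bundled vc_redist installer is skipped.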
function GetDirSize(Path: String): Int64;
var
FindRec: TFindRec;
FilePath: string;
Size: Int64;
begin
if FindFirst(Path + '\*', FindRec) then begin
Result := 0;
try
repeat
if (FindRec.Name <> '.') and (FindRec.Name <> '..') then begin
FilePath := Path + '\' + FindRec.Name;
if (FindRec.Attributes and FILE_ATTRIBUTE_DIRECTORY) <> 0 then begin
Size := GetDirSize(FilePath);
end else begin
Size := Int64(FindRec.SizeHigh) shl 32 + FindRec.SizeLow;
end;
Result := Result + Size;
end;
until not FindNext(FindRec);
finally
FindClose(FindRec);
end;
end else begin
Log(Format('Failed to list %s', [Path]));
Result := -1;
end;
end;
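{ Sizes come back from FindFirst/FindNext as two 32-bit halves, recombined
  above as SizeHigh shl 32 + SizeLow; a failed listing returns -1 so callers
  can tell an unreadable directory from an empty one. }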
var
DeleteModelsChecked: Boolean;
ModelsDir: string;
procedure InitializeUninstallProgressForm();
var
UninstallPage: TNewNotebookPage;
UninstallButton: TNewButton;
DeleteModelsCheckbox: TNewCheckBox;
OriginalPageNameLabel: string;
OriginalPageDescriptionLabel: string;
OriginalCancelButtonEnabled: Boolean;
OriginalCancelButtonModalResult: Integer;
ctrl: TWinControl;
ModelsSize: Int64;
begin
if not UninstallSilent then begin
ctrl := UninstallProgressForm.CancelButton;
UninstallButton := TNewButton.Create(UninstallProgressForm);
UninstallButton.Parent := UninstallProgressForm;
UninstallButton.Left := ctrl.Left - ctrl.Width - ScaleX(10);
UninstallButton.Top := ctrl.Top;
UninstallButton.Width := ctrl.Width;
UninstallButton.Height := ctrl.Height;
UninstallButton.TabOrder := ctrl.TabOrder;
UninstallButton.Caption := 'Uninstall';
UninstallButton.ModalResult := mrOK;
UninstallProgressForm.CancelButton.TabOrder := UninstallButton.TabOrder + 1;
UninstallPage := TNewNotebookPage.Create(UninstallProgressForm);
UninstallPage.Notebook := UninstallProgressForm.InnerNotebook;
UninstallPage.Parent := UninstallProgressForm.InnerNotebook;
UninstallPage.Align := alClient;
UninstallProgressForm.InnerNotebook.ActivePage := UninstallPage;
ctrl := UninstallProgressForm.StatusLabel;
with TNewStaticText.Create(UninstallProgressForm) do begin
Parent := UninstallPage;
Top := ctrl.Top;
Left := ctrl.Left;
Width := ctrl.Width;
Height := ctrl.Height;
AutoSize := False;
ShowAccelChar := False;
Caption := '';
end;
if (DirExists(GetEnv('USERPROFILE') + '\.ollama\models\blobs')) then begin
ModelsDir := GetEnv('USERPROFILE') + '\.ollama\models';
ModelsSize := GetDirSize(ModelsDir);
end;
DeleteModelsCheckbox := TNewCheckBox.Create(UninstallProgressForm);
DeleteModelsCheckbox.Parent := UninstallPage;
DeleteModelsCheckbox.Top := ctrl.Top + ScaleY(30);
DeleteModelsCheckbox.Left := ctrl.Left;
DeleteModelsCheckbox.Width := ScaleX(300);
  if ModelsSize > 1024*1024*1024 then begin
    DeleteModelsCheckbox.Caption := 'Remove models (' + IntToStr(ModelsSize div (1024*1024*1024)) + ' GB) ' + ModelsDir;
  end else if ModelsSize > 1024*1024 then begin
    DeleteModelsCheckbox.Caption := 'Remove models (' + IntToStr(ModelsSize div (1024*1024)) + ' MB) ' + ModelsDir;
  end else begin
    DeleteModelsCheckbox.Caption := 'Remove models ' + ModelsDir;
  end;
DeleteModelsCheckbox.Checked := True;
OriginalPageNameLabel := UninstallProgressForm.PageNameLabel.Caption;
OriginalPageDescriptionLabel := UninstallProgressForm.PageDescriptionLabel.Caption;
OriginalCancelButtonEnabled := UninstallProgressForm.CancelButton.Enabled;
OriginalCancelButtonModalResult := UninstallProgressForm.CancelButton.ModalResult;
UninstallProgressForm.PageNameLabel.Caption := '';
UninstallProgressForm.PageDescriptionLabel.Caption := '';
UninstallProgressForm.CancelButton.Enabled := True;
UninstallProgressForm.CancelButton.ModalResult := mrCancel;
if UninstallProgressForm.ShowModal = mrCancel then Abort;
UninstallButton.Visible := False;
UninstallProgressForm.PageNameLabel.Caption := OriginalPageNameLabel;
UninstallProgressForm.PageDescriptionLabel.Caption := OriginalPageDescriptionLabel;
UninstallProgressForm.CancelButton.Enabled := OriginalCancelButtonEnabled;
UninstallProgressForm.CancelButton.ModalResult := OriginalCancelButtonModalResult;
UninstallProgressForm.InnerNotebook.ActivePage := UninstallProgressForm.InstallingPage;
  DeleteModelsChecked := DeleteModelsCheckbox.Checked;
end;
end;
procedure CurUninstallStepChanged(CurUninstallStep: TUninstallStep);
begin
if CurUninstallStep = usDone then begin
if DeleteModelsChecked then begin
Log('user requested model cleanup');
    if (ModelsDir = '') then begin
      Log('cleaning up home directory models');
DelTree(GetEnv('USERPROFILE') + '\.ollama\models', True, True, True);
end else begin
      Log('cleaning up custom directory models ' + ModelsDir);
DelTree(ModelsDir + '\blobs', True, True, True);
DelTree(ModelsDir + '\manifests', True, True, True);
end;
end else begin
Log('user requested to preserve model dir');
end;
end;
end;
procedure TaskKill(FileName: String);
var
ResultCode: Integer;
begin
Exec('taskkill.exe', '/f /im ' + '"' + FileName + '"', '', SW_HIDE, ewWaitUntilTerminated, ResultCode);
end;

View File

@@ -1,29 +0,0 @@
#include <winver.h>
VS_VERSION_INFO VERSIONINFO
FILEFLAGSMASK 0x3fL
#ifdef _DEBUG
FILEFLAGS 0x1L
#else
FILEFLAGS 0x0L
#endif
FILEOS 0x40004L
FILETYPE 0x1L
FILESUBTYPE 0x0L
BEGIN
BLOCK "StringFileInfo"
BEGIN
BLOCK "040904b0"
BEGIN
VALUE "FileDescription", "Ollama"
VALUE "InternalName", "Ollama"
VALUE "OriginalFilename", "ollama app.exe"
VALUE "ProductName", "Ollama"
END
END
BLOCK "VarFileInfo"
BEGIN
VALUE "Translation", 0x409, 1200
END
END

16500
app/package-lock.json generated Normal file
View File

File diff suppressed because it is too large

81
app/package.json Normal file
View File

@@ -0,0 +1,81 @@
{
"name": "ollama",
"productName": "Ollama",
"version": "0.0.0",
"description": "ollama",
"main": ".webpack/main",
"scripts": {
"start": "electron-forge start",
"package": "electron-forge package",
"package:sign": "SIGN=1 electron-forge package",
"make": "electron-forge make",
"make:sign": "SIGN=1 electron-forge make",
"publish": "SIGN=1 electron-forge publish",
"lint": "eslint --ext .ts,.tsx ."
},
"keywords": [],
"author": {
"name": "Jeffrey Morgan",
"email": "jmorganca@gmail.com"
},
"license": "MIT",
"devDependencies": {
"@babel/core": "^7.22.5",
"@babel/preset-react": "^7.22.5",
"@electron-forge/cli": "^6.2.1",
"@electron-forge/maker-deb": "^6.2.1",
"@electron-forge/maker-rpm": "^6.2.1",
"@electron-forge/maker-squirrel": "^6.2.1",
"@electron-forge/maker-zip": "^6.2.1",
"@electron-forge/plugin-auto-unpack-natives": "^6.2.1",
"@electron-forge/plugin-webpack": "^6.2.1",
"@electron-forge/publisher-github": "^6.2.1",
"@svgr/webpack": "^8.0.1",
"@types/chmodr": "^1.0.0",
"@types/node": "^20.4.0",
"@types/react": "^18.2.14",
"@types/react-dom": "^18.2.6",
"@types/uuid": "^9.0.2",
"@typescript-eslint/eslint-plugin": "^5.60.0",
"@typescript-eslint/parser": "^5.60.0",
"@vercel/webpack-asset-relocator-loader": "^1.7.3",
"babel-loader": "^9.1.2",
"chmodr": "^1.2.0",
"copy-webpack-plugin": "^11.0.0",
"css-loader": "^6.8.1",
"electron": "25.2.0",
"eslint": "^8.43.0",
"eslint-plugin-import": "^2.27.5",
"fork-ts-checker-webpack-plugin": "^7.3.0",
"node-loader": "^2.0.0",
"postcss": "^8.4.24",
"postcss-import": "^15.1.0",
"postcss-loader": "^7.3.3",
"postcss-preset-env": "^8.5.1",
"prettier": "^2.8.8",
"prettier-plugin-tailwindcss": "^0.3.0",
"style-loader": "^3.3.3",
"svg-inline-loader": "^0.8.2",
"tailwindcss": "^3.3.2",
"ts-loader": "^9.4.3",
"ts-node": "^10.9.1",
"typescript": "~4.5.4",
"url-loader": "^4.1.1",
"webpack": "^5.88.0",
"webpack-cli": "^5.1.4",
"webpack-dev-server": "^4.15.1"
},
"dependencies": {
"@electron/remote": "^2.0.10",
"@heroicons/react": "^2.0.18",
"@segment/analytics-node": "^1.0.0",
"copy-to-clipboard": "^3.3.3",
"electron-squirrel-startup": "^1.0.0",
"electron-store": "^8.1.0",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"uuid": "^9.0.0",
"winston": "^3.10.0",
"winston-daily-rotate-file": "^4.7.1"
}
}

7
app/postcss.config.js Normal file
View File

@@ -0,0 +1,7 @@
module.exports = {
plugins: {
'postcss-import': {},
tailwindcss: {},
autoprefixer: {},
},
}

View File

@@ -1,357 +0,0 @@
//go:build windows || darwin
package server
import (
"bufio"
"context"
"errors"
"fmt"
"io"
"log/slog"
"os"
"os/exec"
"path/filepath"
"regexp"
"runtime"
"strconv"
"strings"
"time"
"github.com/ollama/ollama/app/logrotate"
"github.com/ollama/ollama/app/store"
)
const restartDelay = time.Second
// Server is a managed ollama server process
type Server struct {
store *store.Store
bin string // resolved path to `ollama`
log io.WriteCloser
dev bool // true if running with the dev flag
}
type InferenceCompute struct {
Library string
Variant string
Compute string
Driver string
Name string
VRAM string
}
func New(s *store.Store, devMode bool) *Server {
p := resolvePath("ollama")
return &Server{store: s, bin: p, dev: devMode}
}
func resolvePath(name string) string {
// look in the app bundle first
if exe, _ := os.Executable(); exe != "" {
var dir string
if runtime.GOOS == "windows" {
dir = filepath.Dir(exe)
} else {
dir = filepath.Join(filepath.Dir(exe), "..", "Resources")
}
if _, err := os.Stat(filepath.Join(dir, name)); err == nil {
return filepath.Join(dir, name)
}
}
// check the development dist path
for _, path := range []string{
filepath.Join("dist", runtime.GOOS, name),
filepath.Join("dist", runtime.GOOS+"-"+runtime.GOARCH, name),
} {
if _, err := os.Stat(path); err == nil {
return path
}
}
// fallback to system path
if p, _ := exec.LookPath(name); p != "" {
return p
}
return name
}
// cleanup checks the pid file for a running ollama process
// and shuts it down gracefully if it is running
func cleanup() error {
data, err := os.ReadFile(pidFile)
if err != nil {
if os.IsNotExist(err) {
return nil
}
return err
}
defer os.Remove(pidFile)
pid, err := strconv.Atoi(strings.TrimSpace(string(data)))
if err != nil {
return err
}
proc, err := os.FindProcess(pid)
if err != nil {
return nil
}
ok, err := terminated(pid)
if err != nil {
slog.Debug("cleanup: error checking if terminated", "pid", pid, "err", err)
}
if ok {
return nil
}
slog.Info("detected previous ollama process, cleaning up", "pid", pid)
return stop(proc)
}
// stop asks the process to exit gracefully via terminate, then polls
// `terminated(pid)` until it exits. If the process has not exited within
// 5 seconds, it logs a warning and kills the process.
func stop(proc *os.Process) error {
if proc == nil {
return nil
}
if err := terminate(proc); err != nil {
slog.Warn("graceful terminate failed, killing", "err", err)
return proc.Kill()
}
deadline := time.NewTimer(5 * time.Second)
defer deadline.Stop()
for {
select {
case <-deadline.C:
slog.Warn("timeout waiting for graceful shutdown; killing", "pid", proc.Pid)
return proc.Kill()
default:
ok, err := terminated(proc.Pid)
if err != nil {
slog.Error("error checking if ollama process is terminated", "err", err)
return err
}
if ok {
return nil
}
time.Sleep(10 * time.Millisecond)
}
}
}
func (s *Server) Run(ctx context.Context) error {
l, err := openRotatingLog()
if err != nil {
return err
}
s.log = l
defer s.log.Close()
if err := cleanup(); err != nil {
slog.Warn("failed to cleanup previous ollama process", "err", err)
}
reaped := false
for ctx.Err() == nil {
select {
case <-ctx.Done():
return ctx.Err()
case <-time.After(restartDelay):
}
cmd, err := s.cmd(ctx)
if err != nil {
return err
}
if err := cmd.Start(); err != nil {
return err
}
err = os.WriteFile(pidFile, []byte(strconv.Itoa(cmd.Process.Pid)), 0o644)
if err != nil {
slog.Warn("failed to write pid file", "file", pidFile, "err", err)
}
if err = cmd.Wait(); err != nil && !errors.Is(err, context.Canceled) {
var exitErr *exec.ExitError
if errors.As(err, &exitErr) && exitErr.ExitCode() == 1 && !s.dev && !reaped {
reaped = true
// This could be a port conflict, try to kill any existing ollama processes
if err := reapServers(); err != nil {
slog.Warn("failed to stop existing ollama server", "err", err)
} else {
slog.Debug("conflicting server stopped, waiting for port to be released")
continue
}
}
slog.Error("ollama exited", "err", err)
}
}
return ctx.Err()
}
func (s *Server) cmd(ctx context.Context) (*exec.Cmd, error) {
settings, err := s.store.Settings()
if err != nil {
return nil, err
}
cmd := commandContext(ctx, s.bin, "serve")
cmd.Stdout, cmd.Stderr = s.log, s.log
// Copy and mutate the environment to merge in settings the user has specified without dups
env := map[string]string{}
for _, kv := range os.Environ() {
	parts := strings.SplitN(kv, "=", 2)
	env[parts[0]] = parts[1]
}
if settings.Expose {
env["OLLAMA_HOST"] = "0.0.0.0"
}
if settings.Browser {
env["OLLAMA_ORIGINS"] = "*"
}
if settings.Models != "" {
if _, err := os.Stat(settings.Models); err == nil {
env["OLLAMA_MODELS"] = settings.Models
} else {
slog.Warn("models path not accessible, clearing models setting", "path", settings.Models, "err", err)
settings.Models = ""
s.store.SetSettings(settings)
}
}
if settings.ContextLength > 0 {
env["OLLAMA_CONTEXT_LENGTH"] = strconv.Itoa(settings.ContextLength)
}
cmd.Env = []string{}
for k, v := range env {
cmd.Env = append(cmd.Env, k+"="+v)
}
cmd.Cancel = func() error {
if cmd.Process == nil {
return nil
}
return stop(cmd.Process)
}
return cmd, nil
}
func openRotatingLog() (io.WriteCloser, error) {
// TODO consider rotation based on size or time, not just every server invocation
dir := filepath.Dir(serverLogPath)
if err := os.MkdirAll(dir, 0o755); err != nil {
return nil, fmt.Errorf("create log directory: %w", err)
}
logrotate.Rotate(serverLogPath)
f, err := os.OpenFile(serverLogPath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0o644)
if err != nil {
return nil, fmt.Errorf("open log file: %w", err)
}
return f, nil
}
// Attempt to retrieve inference compute information from the server
// log. Set ctx to timeout to control how long to wait for the logs to appear
func GetInferenceComputer(ctx context.Context) ([]InferenceCompute, error) {
inference := []InferenceCompute{}
marker := regexp.MustCompile(`inference compute.*library=`)
q := `inference compute.*%s=["]([^"]*)["]`
nq := `inference compute.*%s=(\S+)\s`
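	// Each field is matched with two patterns: q captures quoted values
	// (e.g. name="NVIDIA GeForce RTX 2080 Ti") and nq captures bare values
	// (e.g. library=cuda).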
type regex struct {
q *regexp.Regexp
nq *regexp.Regexp
}
regexes := map[string]regex{
"library": {
q: regexp.MustCompile(fmt.Sprintf(q, "library")),
nq: regexp.MustCompile(fmt.Sprintf(nq, "library")),
},
"variant": {
q: regexp.MustCompile(fmt.Sprintf(q, "variant")),
nq: regexp.MustCompile(fmt.Sprintf(nq, "variant")),
},
"compute": {
q: regexp.MustCompile(fmt.Sprintf(q, "compute")),
nq: regexp.MustCompile(fmt.Sprintf(nq, "compute")),
},
"driver": {
q: regexp.MustCompile(fmt.Sprintf(q, "driver")),
nq: regexp.MustCompile(fmt.Sprintf(nq, "driver")),
},
"name": {
q: regexp.MustCompile(fmt.Sprintf(q, "name")),
nq: regexp.MustCompile(fmt.Sprintf(nq, "name")),
},
"total": {
q: regexp.MustCompile(fmt.Sprintf(q, "total")),
nq: regexp.MustCompile(fmt.Sprintf(nq, "total")),
},
}
get := func(field, line string) string {
regex, ok := regexes[field]
if !ok {
slog.Warn("missing field", "field", field)
return ""
}
match := regex.q.FindStringSubmatch(line)
if len(match) > 1 {
return match[1]
}
match = regex.nq.FindStringSubmatch(line)
if len(match) > 1 {
return match[1]
}
return ""
}
for {
select {
case <-ctx.Done():
return nil, fmt.Errorf("timeout scanning server log for inference compute details")
default:
}
file, err := os.Open(serverLogPath)
if err != nil {
slog.Debug("failed to open server log", "log", serverLogPath, "error", err)
time.Sleep(time.Second)
continue
}
	// Close explicitly at the end of each loop iteration; a defer here would
	// leak one file handle per retry until the function returns.
scanner := bufio.NewScanner(file)
for scanner.Scan() {
line := scanner.Text()
match := marker.FindStringSubmatch(line)
if len(match) > 0 {
ic := InferenceCompute{
Library: get("library", line),
Variant: get("variant", line),
Compute: get("compute", line),
Driver: get("driver", line),
Name: get("name", line),
VRAM: get("total", line),
}
slog.Info("Matched", "inference compute", ic)
inference = append(inference, ic)
		} else {
			// Break out on the first non-matching line after we start matching
			if len(inference) > 0 {
				file.Close()
				return inference, nil
			}
		}
}
	file.Close()
	time.Sleep(100 * time.Millisecond)
}
}
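// Usage sketch (illustrative; not part of this file): callers are expected to
// bound the log scan with a context deadline, mirroring the tests:
//
//	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
//	defer cancel()
//	ics, err := GetInferenceComputer(ctx)
//	if err != nil {
//		// "inference compute" lines never appeared before the deadline
//	}
//	_ = ics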

View File

@@ -1,249 +0,0 @@
//go:build windows || darwin
package server
import (
"context"
"os"
"path/filepath"
"reflect"
"strings"
"testing"
"time"
"github.com/ollama/ollama/app/store"
)
func TestNew(t *testing.T) {
tmpDir := t.TempDir()
st := &store.Store{DBPath: filepath.Join(tmpDir, "db.sqlite")}
defer st.Close() // Ensure database is closed before cleanup
s := New(st, false)
if s == nil {
t.Fatal("expected non-nil server")
}
if s.bin == "" {
t.Error("expected non-empty bin path")
}
}
func TestServerCmd(t *testing.T) {
os.Unsetenv("OLLAMA_HOST")
os.Unsetenv("OLLAMA_ORIGINS")
os.Unsetenv("OLLAMA_MODELS")
var defaultModels string
home, err := os.UserHomeDir()
if err == nil {
defaultModels = filepath.Join(home, ".ollama", "models")
os.MkdirAll(defaultModels, 0o755)
}
tmpModels := t.TempDir()
tests := []struct {
name string
settings store.Settings
want []string
dont []string
}{
{
name: "default",
settings: store.Settings{},
want: []string{"OLLAMA_MODELS=" + defaultModels},
dont: []string{"OLLAMA_HOST=", "OLLAMA_ORIGINS="},
},
{
name: "expose",
settings: store.Settings{Expose: true},
want: []string{"OLLAMA_HOST=0.0.0.0", "OLLAMA_MODELS=" + defaultModels},
dont: []string{"OLLAMA_ORIGINS="},
},
{
name: "browser",
settings: store.Settings{Browser: true},
want: []string{"OLLAMA_ORIGINS=*", "OLLAMA_MODELS=" + defaultModels},
dont: []string{"OLLAMA_HOST="},
},
{
name: "models",
settings: store.Settings{Models: tmpModels},
want: []string{"OLLAMA_MODELS=" + tmpModels},
dont: []string{"OLLAMA_HOST=", "OLLAMA_ORIGINS="},
},
{
name: "inaccessible_models",
settings: store.Settings{Models: "/nonexistent/external/drive/models"},
want: []string{},
dont: []string{"OLLAMA_MODELS="},
},
{
name: "all",
settings: store.Settings{
Expose: true,
Browser: true,
Models: tmpModels,
},
want: []string{
"OLLAMA_HOST=0.0.0.0",
"OLLAMA_ORIGINS=*",
"OLLAMA_MODELS=" + tmpModels,
},
dont: []string{},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
tmpDir := t.TempDir()
st := &store.Store{DBPath: filepath.Join(tmpDir, "db.sqlite")}
defer st.Close() // Ensure database is closed before cleanup
st.SetSettings(tt.settings)
s := &Server{
store: st,
}
cmd, err := s.cmd(t.Context())
if err != nil {
t.Fatalf("s.cmd() error = %v", err)
}
for _, want := range tt.want {
found := false
for _, env := range cmd.Env {
if strings.Contains(env, want) {
found = true
break
}
}
if !found {
t.Errorf("expected environment variable containing %s", want)
}
}
for _, dont := range tt.dont {
for _, env := range cmd.Env {
if strings.Contains(env, dont) {
t.Errorf("unexpected environment variable: %s", env)
}
}
}
if cmd.Cancel == nil {
t.Error("expected non-nil cancel function")
}
})
}
}
func TestGetInferenceComputer(t *testing.T) {
tests := []struct {
name string
log string
exp []InferenceCompute
}{
{
name: "metal",
log: `time=2025-06-30T09:23:07.374-07:00 level=DEBUG source=sched.go:108 msg="starting llm scheduler"
time=2025-06-30T09:23:07.416-07:00 level=INFO source=types.go:130 msg="inference compute" id=0 library=metal variant="" compute="" driver=0.0 name="" total="96.0 GiB" available="96.0 GiB"
time=2025-06-30T09:25:56.197-07:00 level=DEBUG source=ggml.go:155 msg="key not found" key=general.alignment default=32
`,
exp: []InferenceCompute{{
Library: "metal",
Driver: "0.0",
VRAM: "96.0 GiB",
}},
},
{
name: "cpu",
log: `time=2025-07-01T17:59:51.470Z level=INFO source=gpu.go:377 msg="no compatible GPUs were discovered"
time=2025-07-01T17:59:51.470Z level=INFO source=types.go:130 msg="inference compute" id=0 library=cpu variant="" compute="" driver=0.0 name="" total="31.3 GiB" available="30.4 GiB"
[GIN] 2025/07/01 - 18:00:09 | 200 | 50.263µs | 100.126.204.152 | HEAD "/"
`,
exp: []InferenceCompute{{
Library: "cpu",
Driver: "0.0",
VRAM: "31.3 GiB",
}},
},
{
name: "cuda1",
log: `time=2025-07-01T19:33:43.162Z level=DEBUG source=amd_linux.go:419 msg="amdgpu driver not detected /sys/module/amdgpu"
releasing cuda driver library
time=2025-07-01T19:33:43.162Z level=INFO source=types.go:130 msg="inference compute" id=GPU-452cac9f-6960-839c-4fb3-0cec83699196 library=cuda variant=v12 compute=6.1 driver=12.7 name="NVIDIA GeForce GT 1030" total="3.9 GiB" available="3.9 GiB"
[GIN] 2025/07/01 - 18:00:09 | 200 | 50.263µs | 100.126.204.152 | HEAD "/"
`,
exp: []InferenceCompute{{
Library: "cuda",
Variant: "v12",
Compute: "6.1",
Driver: "12.7",
Name: "NVIDIA GeForce GT 1030",
VRAM: "3.9 GiB",
}},
},
{
name: "frank",
log: `time=2025-07-01T19:36:13.315Z level=INFO source=amd_linux.go:386 msg="amdgpu is supported" gpu=GPU-9abb57639fa80c50 gpu_type=gfx1030
releasing cuda driver library
time=2025-07-01T19:36:13.315Z level=INFO source=types.go:130 msg="inference compute" id=GPU-d6de3398-9932-6902-11ec-fee8e424c8a2 library=cuda variant=v12 compute=7.5 driver=12.8 name="NVIDIA GeForce RTX 2080 Ti" total="10.6 GiB" available="10.4 GiB"
time=2025-07-01T19:36:13.315Z level=INFO source=types.go:130 msg="inference compute" id=GPU-9abb57639fa80c50 library=rocm variant="" compute=gfx1030 driver=6.3 name=1002:73bf total="16.0 GiB" available="1.3 GiB"
[GIN] 2025/07/01 - 18:00:09 | 200 | 50.263µs | 100.126.204.152 | HEAD "/"
`,
exp: []InferenceCompute{
{
Library: "cuda",
Variant: "v12",
Compute: "7.5",
Driver: "12.8",
Name: "NVIDIA GeForce RTX 2080 Ti",
VRAM: "10.6 GiB",
},
{
Library: "rocm",
Compute: "gfx1030",
Driver: "6.3",
Name: "1002:73bf",
VRAM: "16.0 GiB",
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
tmpDir := t.TempDir()
serverLogPath = filepath.Join(tmpDir, "server.log")
err := os.WriteFile(serverLogPath, []byte(tt.log), 0o644)
if err != nil {
t.Fatalf("failed to write log file %s: %s", serverLogPath, err)
}
ctx, cancel := context.WithTimeout(t.Context(), 10*time.Millisecond)
defer cancel()
ics, err := GetInferenceComputer(ctx)
if err != nil {
t.Fatalf(" failed to get inference compute: %v", err)
}
if !reflect.DeepEqual(ics, tt.exp) {
t.Fatalf("got:\n%#v\nwant:\n%#v", ics, tt.exp)
}
})
}
}
func TestGetInferenceComputerTimeout(t *testing.T) {
ctx, cancel := context.WithTimeout(t.Context(), 10*time.Millisecond)
defer cancel()
tmpDir := t.TempDir()
serverLogPath = filepath.Join(tmpDir, "server.log")
err := os.WriteFile(serverLogPath, []byte("foo\nbar\nbaz\n"), 0o644)
if err != nil {
t.Fatalf("failed to write log file %s: %s", serverLogPath, err)
}
_, err = GetInferenceComputer(ctx)
if err == nil {
t.Fatal("expected timeout")
}
if !strings.Contains(err.Error(), "timeout") {
t.Fatalf("unexpected error: %s", err)
}
}

View File

@@ -1,104 +0,0 @@
//go:build darwin
package server
import (
"context"
"errors"
"fmt"
"log/slog"
"os"
"os/exec"
"path/filepath"
"strconv"
"strings"
"syscall"
)
var (
pidFile = filepath.Join(os.Getenv("HOME"), "Library", "Application Support", "Ollama", "ollama.pid")
serverLogPath = filepath.Join(os.Getenv("HOME"), ".ollama", "logs", "server.log")
)
func commandContext(ctx context.Context, name string, arg ...string) *exec.Cmd {
return exec.CommandContext(ctx, name, arg...)
}
func terminate(proc *os.Process) error {
return proc.Signal(os.Interrupt)
}
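// terminated reports whether the process has exited by probing it with
// signal 0, which checks existence without delivering a signal.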
func terminated(pid int) (bool, error) {
proc, err := os.FindProcess(pid)
if err != nil {
return false, fmt.Errorf("failed to find process: %v", err)
}
err = proc.Signal(syscall.Signal(0))
if err != nil {
if errors.Is(err, os.ErrProcessDone) || errors.Is(err, syscall.ESRCH) {
return true, nil
}
return false, fmt.Errorf("error signaling process: %v", err)
}
return false, nil
}
// reapServers kills all ollama processes except our own
func reapServers() error {
// Get our own PID to avoid killing ourselves
currentPID := os.Getpid()
// Use pkill to kill ollama processes
// -x matches the whole command name exactly
// We'll get the list first, then kill selectively
cmd := exec.Command("pgrep", "-x", "ollama")
output, err := cmd.Output()
if err != nil {
// No ollama processes found
slog.Debug("no ollama processes found")
return nil //nolint:nilerr
}
pidsStr := strings.TrimSpace(string(output))
if pidsStr == "" {
return nil
}
pids := strings.Split(pidsStr, "\n")
for _, pidStr := range pids {
pidStr = strings.TrimSpace(pidStr)
if pidStr == "" {
continue
}
pid, err := strconv.Atoi(pidStr)
if err != nil {
slog.Debug("failed to parse PID", "pidStr", pidStr, "err", err)
continue
}
if pid == currentPID {
continue
}
proc, err := os.FindProcess(pid)
if err != nil {
slog.Debug("failed to find process", "pid", pid, "err", err)
continue
}
if err := proc.Signal(syscall.SIGTERM); err != nil {
// Try SIGKILL if SIGTERM fails
if err := proc.Signal(syscall.SIGKILL); err != nil {
slog.Warn("failed to stop external ollama process", "pid", pid, "err", err)
continue
}
}
slog.Info("stopped external ollama process", "pid", pid)
}
return nil
}

View File

@@ -1,149 +0,0 @@
package server
import (
"context"
"fmt"
"log/slog"
"os"
"os/exec"
"path/filepath"
"strconv"
"strings"
"syscall"
"golang.org/x/sys/windows"
)
var (
pidFile = filepath.Join(os.Getenv("LOCALAPPDATA"), "Ollama", "ollama.pid")
serverLogPath = filepath.Join(os.Getenv("LOCALAPPDATA"), "Ollama", "server.log")
)
func commandContext(ctx context.Context, name string, arg ...string) *exec.Cmd {
cmd := exec.CommandContext(ctx, name, arg...)
cmd.SysProcAttr = &syscall.SysProcAttr{
HideWindow: true,
CreationFlags: windows.CREATE_NEW_PROCESS_GROUP,
}
return cmd
}
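// terminate asks the server to exit gracefully: attach to its console,
// suppress our own ctrl handler, then send CTRL_BREAK and CTRL_C events to
// the process group.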
func terminate(proc *os.Process) error {
dll, err := windows.LoadDLL("kernel32.dll")
if err != nil {
return err
}
defer dll.Release()
pid := proc.Pid
f, err := dll.FindProc("AttachConsole")
if err != nil {
return err
}
r1, _, err := f.Call(uintptr(pid))
if r1 == 0 && err != syscall.ERROR_ACCESS_DENIED {
return err
}
f, err = dll.FindProc("SetConsoleCtrlHandler")
if err != nil {
return err
}
r1, _, err = f.Call(0, 1)
if r1 == 0 {
return err
}
f, err = dll.FindProc("GenerateConsoleCtrlEvent")
if err != nil {
return err
}
r1, _, err = f.Call(windows.CTRL_BREAK_EVENT, uintptr(pid))
if r1 == 0 {
return err
}
r1, _, err = f.Call(windows.CTRL_C_EVENT, uintptr(pid))
if r1 == 0 {
return err
}
return nil
}
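// STILL_ACTIVE (259) is the exit code GetExitCodeProcess reports for a
// process that has not yet exited.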
const STILL_ACTIVE = 259
func terminated(pid int) (bool, error) {
hProcess, err := windows.OpenProcess(windows.PROCESS_QUERY_INFORMATION, false, uint32(pid))
if err != nil {
if errno, ok := err.(windows.Errno); ok && errno == windows.ERROR_INVALID_PARAMETER {
return true, nil
}
return false, fmt.Errorf("failed to open process: %v", err)
}
defer windows.CloseHandle(hProcess)
var exitCode uint32
err = windows.GetExitCodeProcess(hProcess, &exitCode)
if err != nil {
return false, fmt.Errorf("failed to get exit code: %v", err)
}
if exitCode == STILL_ACTIVE {
return false, nil
}
return true, nil
}
// reapServers kills all ollama processes except our own
func reapServers() error {
// Get current process ID to avoid killing ourselves
currentPID := os.Getpid()
// Use wmic to find ollama processes
cmd := exec.Command("wmic", "process", "where", "name='ollama.exe'", "get", "ProcessId")
cmd.SysProcAttr = &syscall.SysProcAttr{HideWindow: true}
output, err := cmd.Output()
if err != nil {
// No ollama processes found
slog.Debug("no ollama processes found")
return nil //nolint:nilerr
}
lines := strings.Split(string(output), "\n")
var pids []string
for _, line := range lines {
line = strings.TrimSpace(line)
if line == "" || line == "ProcessId" {
continue
}
if _, err := strconv.Atoi(line); err == nil {
pids = append(pids, line)
}
}
for _, pidStr := range pids {
pid, err := strconv.Atoi(pidStr)
if err != nil {
continue
}
if pid == currentPID {
continue
}
cmd := exec.Command("taskkill", "/F", "/PID", pidStr)
if err := cmd.Run(); err != nil {
slog.Warn("failed to kill ollama process", "pid", pid, "err", err)
}
}
return nil
}

34
app/src/app.css Normal file
View File

@@ -0,0 +1,34 @@
@tailwind base;
@tailwind components;
@tailwind utilities;
html,
body {
background: transparent;
}
.drag {
-webkit-app-region: drag;
}
.no-drag {
-webkit-app-region: no-drag;
}
.blink {
-webkit-animation: 1s blink step-end infinite;
-moz-animation: 1s blink step-end infinite;
-ms-animation: 1s blink step-end infinite;
-o-animation: 1s blink step-end infinite;
animation: 1s blink step-end infinite;
}
@keyframes blink {
from,
to {
color: transparent;
}
50% {
color: black;
}
}

111
app/src/app.tsx Normal file
View File

@@ -0,0 +1,111 @@
import { useState } from 'react'
import copy from 'copy-to-clipboard'
import { CheckIcon, DocumentDuplicateIcon } from '@heroicons/react/24/outline'
import Store from 'electron-store'
import { getCurrentWindow } from '@electron/remote'
import { install } from './install'
import OllamaIcon from './ollama.svg'
const store = new Store()
enum Step {
WELCOME = 0,
CLI,
FINISH,
}
export default function () {
const [step, setStep] = useState<Step>(Step.WELCOME)
const [commandCopied, setCommandCopied] = useState<boolean>(false)
const command = 'ollama run orca'
return (
<div className='drag'>
<div className='mx-auto flex min-h-screen w-full flex-col justify-between bg-white px-4 pt-16'>
{step === Step.WELCOME && (
<>
<div className='mx-auto text-center'>
<h1 className='mb-6 mt-4 text-2xl tracking-tight text-gray-900'>Welcome to Ollama</h1>
<p className='mx-auto w-[65%] text-sm text-gray-400'>
Let's get you up and running with your own large language models.
</p>
<button
onClick={() => setStep(Step.CLI)}
className='no-drag rounded-dm mx-auto my-8 w-[40%] rounded-md bg-black px-4 py-2 text-sm text-white hover:brightness-110'
>
Next
</button>
</div>
<div className='mx-auto'>
<OllamaIcon />
</div>
</>
)}
{step === Step.CLI && (
<>
<div className='mx-auto flex flex-col space-y-28 text-center'>
<h1 className='mt-4 text-2xl tracking-tight text-gray-900'>Install the command line</h1>
<pre className='mx-auto text-4xl text-gray-400'>&gt; ollama</pre>
<div className='mx-auto'>
<button
onClick={async () => {
await install()
getCurrentWindow().show()
getCurrentWindow().focus()
setStep(Step.FINISH)
}}
className='no-drag rounded-dm mx-auto w-[60%] rounded-md bg-black px-4 py-2 text-sm text-white hover:brightness-110'
>
Install
</button>
<p className='mx-auto my-4 w-[70%] text-xs text-gray-400'>
You will be prompted for administrator access
</p>
</div>
</div>
</>
)}
{step === Step.FINISH && (
<>
<div className='mx-auto flex flex-col space-y-20 text-center'>
<h1 className='mt-4 text-2xl tracking-tight text-gray-900'>Run your first model</h1>
<div className='flex flex-col'>
<div className='group relative flex items-center'>
<pre className='language-none text-2xs w-full rounded-md bg-gray-100 px-4 py-3 text-start leading-normal'>
{command}
</pre>
<button
className={`no-drag absolute right-[5px] px-2 py-2 ${commandCopied ? 'text-gray-900 opacity-100 hover:cursor-auto' : 'text-gray-200 opacity-50 hover:cursor-pointer'} hover:text-gray-900 hover:font-bold group-hover:opacity-100`}
onClick={() => {
copy(command)
setCommandCopied(true)
setTimeout(() => setCommandCopied(false), 3000)
}}
>
{commandCopied ? (
<CheckIcon className='h-4 w-4 text-gray-500 font-bold' />
) : (
<DocumentDuplicateIcon className='h-4 w-4 text-gray-500' />
)}
</button>
</div>
<p className='mx-auto my-4 w-[70%] text-xs text-gray-400'>Run this command in your favorite terminal.</p>
</div>
<button
onClick={() => {
store.set('first-time-run', true)
window.close()
}}
className='no-drag rounded-dm mx-auto w-[60%] rounded-md bg-black px-4 py-2 text-sm text-white hover:brightness-110'
>
Finish
</button>
</div>
</>
)}
</div>
</div>
)
}

4
app/src/declarations.d.ts vendored Normal file
View File

@@ -0,0 +1,4 @@
declare module '*.svg' {
const content: string;
export default content;
}

9
app/src/index.html Normal file
View File

@@ -0,0 +1,9 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8" />
</head>
<body>
<div id="app"></div>
</body>
</html>

219
app/src/index.ts Normal file
View File

@@ -0,0 +1,219 @@
import { spawn } from 'child_process'
import { app, autoUpdater, dialog, Tray, Menu, BrowserWindow } from 'electron'
import Store from 'electron-store'
import winston from 'winston'
import 'winston-daily-rotate-file'
import * as path from 'path'
import { analytics, id } from './telemetry'
import { installed } from './install'
require('@electron/remote/main').initialize()
const store = new Store()
let tray: Tray | null = null
let welcomeWindow: BrowserWindow | null = null
declare const MAIN_WINDOW_WEBPACK_ENTRY: string
const logger = winston.createLogger({
transports: [
new winston.transports.Console(),
new winston.transports.File({
filename: path.join(app.getPath('home'), '.ollama', 'logs', 'server.log'),
maxsize: 1024 * 1024 * 20,
maxFiles: 5,
}),
],
format: winston.format.printf(info => info.message),
})
const SingleInstanceLock = app.requestSingleInstanceLock()
if (!SingleInstanceLock) {
app.quit()
}
function firstRunWindow() {
// Create the browser window.
welcomeWindow = new BrowserWindow({
width: 400,
height: 500,
frame: false,
fullscreenable: false,
resizable: false,
movable: true,
show: false,
webPreferences: {
nodeIntegration: true,
contextIsolation: false,
},
alwaysOnTop: true,
})
require('@electron/remote/main').enable(welcomeWindow.webContents)
// and load the index.html of the app.
welcomeWindow.loadURL(MAIN_WINDOW_WEBPACK_ENTRY)
welcomeWindow.on('ready-to-show', () => welcomeWindow.show())
// for debugging
// welcomeWindow.webContents.openDevTools()
if (process.platform === 'darwin') {
app.dock.hide()
}
}
function createSystemtray() {
let iconPath = path.join(__dirname, '..', '..', 'assets', 'ollama_icon_16x16Template.png')
if (app.isPackaged) {
iconPath = path.join(process.resourcesPath, 'ollama_icon_16x16Template.png')
}
tray = new Tray(iconPath)
const contextMenu = Menu.buildFromTemplate([{ role: 'quit', label: 'Quit Ollama', accelerator: 'Command+Q' }])
tray.setContextMenu(contextMenu)
tray.setToolTip('Ollama')
}
if (require('electron-squirrel-startup')) {
app.quit()
}
function server() {
const binary = app.isPackaged
? path.join(process.resourcesPath, 'ollama')
: path.resolve(process.cwd(), '..', 'ollama')
const proc = spawn(binary, ['serve'])
proc.stdout.on('data', data => {
logger.info(data.toString().trim())
})
proc.stderr.on('data', data => {
logger.error(data.toString().trim())
})
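  // Restart whenever the server exits; the listener is removed in
  // before-quit below so app shutdown doesn't spawn a replacement.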
function restart() {
logger.info('Restarting the server...')
server()
}
proc.on('exit', restart)
app.on('before-quit', () => {
proc.off('exit', restart)
proc.kill()
})
}
if (process.platform === 'darwin') {
app.dock.hide()
}
app.on('ready', () => {
if (process.platform === 'darwin') {
if (app.isPackaged) {
if (!app.isInApplicationsFolder()) {
const chosen = dialog.showMessageBoxSync({
type: 'question',
buttons: ['Move to Applications', 'Do Not Move'],
message: 'Ollama works best when run from the Applications directory.',
defaultId: 0,
cancelId: 1,
})
if (chosen === 0) {
try {
app.moveToApplicationsFolder({
conflictHandler: conflictType => {
if (conflictType === 'existsAndRunning') {
dialog.showMessageBoxSync({
type: 'info',
message: 'Cannot move to Applications directory',
detail:
'Another version of Ollama is currently running from your Applications directory. Close it first and try again.',
})
}
return true
},
})
return
} catch (e) {
logger.error(`[Move to Applications] Failed to move to applications folder - ${e.message}`)
}
}
}
}
}
createSystemtray()
server()
if (store.get('first-time-run') && installed()) {
app.setLoginItemSettings({ openAtLogin: app.getLoginItemSettings().openAtLogin })
return
}
// This is the first run or the CLI is no longer installed
app.setLoginItemSettings({ openAtLogin: true })
firstRunWindow()
})
// Quit when all windows are closed, except on macOS. There, it's common
// for applications and their menu bar to stay active until the user quits
// explicitly with Cmd + Q.
app.on('window-all-closed', () => {
if (process.platform !== 'darwin') {
app.quit()
}
})
// In this file you can include the rest of your app's specific main process
// code. You can also put them in separate files and import them here.
autoUpdater.setFeedURL({
url: `https://ollama.ai/api/update?os=${process.platform}&arch=${process.arch}&version=${app.getVersion()}`,
})
async function heartbeat() {
analytics.track({
anonymousId: id(),
event: 'heartbeat',
properties: {
version: app.getVersion(),
},
})
}
if (app.isPackaged) {
heartbeat()
autoUpdater.checkForUpdates()
setInterval(() => {
heartbeat()
autoUpdater.checkForUpdates()
}, 60 * 60 * 1000)
}
autoUpdater.on('error', e => {
logger.error(`update check failed - ${e.message}`)
})
autoUpdater.on('update-downloaded', (event, releaseNotes, releaseName) => {
dialog
.showMessageBox({
type: 'info',
buttons: ['Restart Now', 'Later'],
title: 'New update available',
message: process.platform === 'win32' ? releaseNotes : releaseName,
detail: 'A new version of Ollama is available. Restart to apply the update.',
})
.then(returnValue => {
if (returnValue.response === 0) autoUpdater.quitAndInstall()
})
})

24
app/src/install.ts Normal file
View File

@@ -0,0 +1,24 @@
import * as fs from 'fs'
import { exec as cbExec } from 'child_process'
import * as path from 'path'
import { promisify } from 'util'
const app = process && process.type === 'renderer' ? require('@electron/remote').app : require('electron').app
const ollama = app.isPackaged ? path.join(process.resourcesPath, 'ollama') : path.resolve(process.cwd(), '..', 'ollama')
const exec = promisify(cbExec)
const symlinkPath = '/usr/local/bin/ollama'
export function installed() {
return fs.existsSync(symlinkPath) && fs.readlinkSync(symlinkPath) === ollama
}
export async function install() {
const command = `do shell script "ln -F -s ${ollama} ${symlinkPath}" with administrator privileges`
try {
await exec(`osascript -e '${command}'`)
} catch (error) {
console.error(`cli: failed to install cli: ${error.message}`)
return
}
}

9
app/src/ollama.svg Normal file
View File

File diff suppressed because one or more lines are too long

(SVG image preview omitted; 17 KiB)

0
app/src/preload.ts Normal file
View File

7
app/src/renderer.tsx Normal file
View File

@@ -0,0 +1,7 @@
import App from './app'
import './app.css'
import { createRoot } from 'react-dom/client'
const container = document.getElementById('app')
const root = createRoot(container)
root.render(<App />)

19
app/src/telemetry.ts Normal file
View File

@@ -0,0 +1,19 @@
import { Analytics } from '@segment/analytics-node'
import { v4 as uuidv4 } from 'uuid'
import Store from 'electron-store'
const store = new Store()
export const analytics = new Analytics({ writeKey: process.env.TELEMETRY_WRITE_KEY || '<empty>' })
export function id(): string {
const id = store.get('id') as string
if (id) {
return id
}
const uuid = uuidv4()
store.set('id', uuid)
return uuid
}

View File

File diff suppressed because it is too large

View File

@@ -1,407 +0,0 @@
//go:build windows || darwin
package store
import (
"database/sql"
"fmt"
"os"
"path/filepath"
"sort"
"strings"
"testing"
"time"
"github.com/google/go-cmp/cmp"
_ "github.com/mattn/go-sqlite3"
)
func TestSchemaMigrations(t *testing.T) {
t.Run("schema comparison after migration", func(t *testing.T) {
tmpDir := t.TempDir()
migratedDBPath := filepath.Join(tmpDir, "migrated.db")
migratedDB := loadV2Schema(t, migratedDBPath)
defer migratedDB.Close()
if err := migratedDB.migrate(); err != nil {
t.Fatalf("migration failed: %v", err)
}
// Create fresh database with current schema
freshDBPath := filepath.Join(tmpDir, "fresh.db")
freshDB, err := newDatabase(freshDBPath)
if err != nil {
t.Fatalf("failed to create fresh database: %v", err)
}
defer freshDB.Close()
// Extract tables and indexes from both databases, directly comparing their schemas won't work due to ordering
migratedSchema := schemaMap(migratedDB)
freshSchema := schemaMap(freshDB)
if !cmp.Equal(migratedSchema, freshSchema) {
t.Errorf("Schema difference found:\n%s", cmp.Diff(freshSchema, migratedSchema))
}
// Verify both databases have the same final schema version
migratedVersion, _ := migratedDB.getSchemaVersion()
freshVersion, _ := freshDB.getSchemaVersion()
if migratedVersion != freshVersion {
t.Errorf("schema version mismatch: migrated=%d, fresh=%d", migratedVersion, freshVersion)
}
})
t.Run("idempotent migrations", func(t *testing.T) {
tmpDir := t.TempDir()
dbPath := filepath.Join(tmpDir, "test.db")
db := loadV2Schema(t, dbPath)
defer db.Close()
// Run migration twice
if err := db.migrate(); err != nil {
t.Fatalf("first migration failed: %v", err)
}
if err := db.migrate(); err != nil {
t.Fatalf("second migration failed: %v", err)
}
// Verify schema version is still correct
version, err := db.getSchemaVersion()
if err != nil {
t.Fatalf("failed to get schema version: %v", err)
}
if version != currentSchemaVersion {
t.Errorf("expected schema version %d after double migration, got %d", currentSchemaVersion, version)
}
})
t.Run("init database has correct schema version", func(t *testing.T) {
tmpDir := t.TempDir()
dbPath := filepath.Join(tmpDir, "test.db")
db, err := newDatabase(dbPath)
if err != nil {
t.Fatalf("failed to create database: %v", err)
}
defer db.Close()
// Get the schema version from the newly initialized database
version, err := db.getSchemaVersion()
if err != nil {
t.Fatalf("failed to get schema version: %v", err)
}
// Verify it matches the currentSchemaVersion constant
if version != currentSchemaVersion {
t.Errorf("expected schema version %d in initialized database, got %d", currentSchemaVersion, version)
}
})
}
func TestChatDeletionWithCascade(t *testing.T) {
t.Run("chat deletion cascades to related messages", func(t *testing.T) {
tmpDir := t.TempDir()
dbPath := filepath.Join(tmpDir, "test.db")
db, err := newDatabase(dbPath)
if err != nil {
t.Fatalf("failed to create database: %v", err)
}
defer db.Close()
// Create test chat
testChatID := "test-chat-cascade-123"
testChat := Chat{
ID: testChatID,
Title: "Test Chat for Cascade Delete",
CreatedAt: time.Now(),
Messages: []Message{
{
Role: "user",
Content: "Hello, this is a test message",
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
},
{
Role: "assistant",
Content: "Hi there! This is a response.",
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
},
},
}
// Save the chat with messages
if err := db.saveChat(testChat); err != nil {
t.Fatalf("failed to save test chat: %v", err)
}
// Verify chat and messages exist
chatCount := countRows(t, db, "chats")
messageCount := countRows(t, db, "messages")
if chatCount != 1 {
t.Errorf("expected 1 chat, got %d", chatCount)
}
if messageCount != 2 {
t.Errorf("expected 2 messages, got %d", messageCount)
}
// Verify specific chat exists
var exists bool
err = db.conn.QueryRow("SELECT EXISTS(SELECT 1 FROM chats WHERE id = ?)", testChatID).Scan(&exists)
if err != nil {
t.Fatalf("failed to check chat existence: %v", err)
}
if !exists {
t.Error("test chat should exist before deletion")
}
// Verify messages exist for this chat
messageCountForChat := countRowsWithCondition(t, db, "messages", "chat_id = ?", testChatID)
if messageCountForChat != 2 {
t.Errorf("expected 2 messages for test chat, got %d", messageCountForChat)
}
// Delete the chat
if err := db.deleteChat(testChatID); err != nil {
t.Fatalf("failed to delete chat: %v", err)
}
// Verify chat is deleted
chatCountAfter := countRows(t, db, "chats")
if chatCountAfter != 0 {
t.Errorf("expected 0 chats after deletion, got %d", chatCountAfter)
}
// Verify messages are CASCADE deleted
messageCountAfter := countRows(t, db, "messages")
if messageCountAfter != 0 {
t.Errorf("expected 0 messages after CASCADE deletion, got %d", messageCountAfter)
}
// Verify specific chat no longer exists
err = db.conn.QueryRow("SELECT EXISTS(SELECT 1 FROM chats WHERE id = ?)", testChatID).Scan(&exists)
if err != nil {
t.Fatalf("failed to check chat existence after deletion: %v", err)
}
if exists {
t.Error("test chat should not exist after deletion")
}
// Verify no orphaned messages remain
orphanedCount := countRowsWithCondition(t, db, "messages", "chat_id = ?", testChatID)
if orphanedCount != 0 {
t.Errorf("expected 0 orphaned messages, got %d", orphanedCount)
}
})
t.Run("foreign keys are enabled", func(t *testing.T) {
tmpDir := t.TempDir()
dbPath := filepath.Join(tmpDir, "test.db")
db, err := newDatabase(dbPath)
if err != nil {
t.Fatalf("failed to create database: %v", err)
}
defer db.Close()
// Verify foreign keys are enabled
var foreignKeysEnabled int
err = db.conn.QueryRow("PRAGMA foreign_keys").Scan(&foreignKeysEnabled)
if err != nil {
t.Fatalf("failed to check foreign keys: %v", err)
}
if foreignKeysEnabled != 1 {
t.Errorf("expected foreign keys to be enabled (1), got %d", foreignKeysEnabled)
}
})
// This test is only relevant for v8 migrations, but we keep it here for now
// since it's a useful test to ensure that we don't introduce any new orphaned data
t.Run("cleanup orphaned data", func(t *testing.T) {
tmpDir := t.TempDir()
dbPath := filepath.Join(tmpDir, "test.db")
db, err := newDatabase(dbPath)
if err != nil {
t.Fatalf("failed to create database: %v", err)
}
defer db.Close()
// First disable foreign keys to simulate the bug from ollama/ollama#11785
_, err = db.conn.Exec("PRAGMA foreign_keys = OFF")
if err != nil {
t.Fatalf("failed to disable foreign keys: %v", err)
}
// Create a chat and message
testChatID := "orphaned-test-chat"
testMessageID := int64(999)
_, err = db.conn.Exec("INSERT INTO chats (id, title) VALUES (?, ?)", testChatID, "Orphaned Test Chat")
if err != nil {
t.Fatalf("failed to insert test chat: %v", err)
}
_, err = db.conn.Exec("INSERT INTO messages (id, chat_id, role, content) VALUES (?, ?, ?, ?)",
testMessageID, testChatID, "user", "test message")
if err != nil {
t.Fatalf("failed to insert test message: %v", err)
}
// Delete chat but keep message (simulating the bug from ollama/ollama#11785)
_, err = db.conn.Exec("DELETE FROM chats WHERE id = ?", testChatID)
if err != nil {
t.Fatalf("failed to delete chat: %v", err)
}
// Verify we have orphaned message
orphanedCount := countRowsWithCondition(t, db, "messages", "chat_id = ?", testChatID)
if orphanedCount != 1 {
t.Errorf("expected 1 orphaned message, got %d", orphanedCount)
}
// Run cleanup
if err := db.cleanupOrphanedData(); err != nil {
t.Fatalf("failed to cleanup orphaned data: %v", err)
}
// Verify orphaned message is gone
orphanedCountAfter := countRowsWithCondition(t, db, "messages", "chat_id = ?", testChatID)
if orphanedCountAfter != 0 {
t.Errorf("expected 0 orphaned messages after cleanup, got %d", orphanedCountAfter)
}
})
}
func countRows(t *testing.T, db *database, table string) int {
t.Helper()
var count int
err := db.conn.QueryRow(fmt.Sprintf("SELECT COUNT(*) FROM %s", table)).Scan(&count)
if err != nil {
t.Fatalf("failed to count rows in %s: %v", table, err)
}
return count
}
func countRowsWithCondition(t *testing.T, db *database, table, condition string, args ...interface{}) int {
t.Helper()
var count int
query := fmt.Sprintf("SELECT COUNT(*) FROM %s WHERE %s", table, condition)
err := db.conn.QueryRow(query, args...).Scan(&count)
if err != nil {
t.Fatalf("failed to count rows with condition: %v", err)
}
return count
}
// Test helpers for schema migration testing
// schemaMap returns both tables/columns and indexes (ignoring order)
func schemaMap(db *database) map[string]interface{} {
result := make(map[string]any)
result["tables"] = columnMap(db)
result["indexes"] = indexMap(db)
return result
}
// columnMap returns a map of table names to their column sets (ignoring order)
func columnMap(db *database) map[string][]string {
result := make(map[string][]string)
// Get all table names
tableQuery := `SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%' ORDER BY name`
rows, _ := db.conn.Query(tableQuery)
defer rows.Close()
for rows.Next() {
var tableName string
rows.Scan(&tableName)
// Get columns for this table
colQuery := fmt.Sprintf("PRAGMA table_info(%s)", tableName)
colRows, _ := db.conn.Query(colQuery)
var columns []string
for colRows.Next() {
var cid int
var name, dataType sql.NullString
var notNull, primaryKey int
var defaultValue sql.NullString
colRows.Scan(&cid, &name, &dataType, &notNull, &defaultValue, &primaryKey)
// Create a normalized column description
colDesc := fmt.Sprintf("%s %s", name.String, dataType.String)
if notNull == 1 {
colDesc += " NOT NULL"
}
if defaultValue.Valid && defaultValue.String != "" {
// Skip DEFAULT for schema_version as it doesn't get updated during migrations
if name.String != "schema_version" {
colDesc += " DEFAULT " + defaultValue.String
}
}
if primaryKey == 1 {
colDesc += " PRIMARY KEY"
}
columns = append(columns, colDesc)
}
colRows.Close()
// Sort columns to ignore order differences
sort.Strings(columns)
result[tableName] = columns
}
return result
}
// indexMap returns a map of index names to their definitions
func indexMap(db *database) map[string]string {
result := make(map[string]string)
// Get all indexes (excluding auto-created primary key indexes)
indexQuery := `SELECT name, sql FROM sqlite_master WHERE type='index' AND name NOT LIKE 'sqlite_%' AND sql IS NOT NULL ORDER BY name`
rows, _ := db.conn.Query(indexQuery)
defer rows.Close()
for rows.Next() {
var name, sql string
rows.Scan(&name, &sql)
// Normalize the SQL by removing extra whitespace
sql = strings.Join(strings.Fields(sql), " ")
result[name] = sql
}
return result
}
// loadV2Schema loads the version 2 schema from testdata/schema.sql
func loadV2Schema(t *testing.T, dbPath string) *database {
t.Helper()
// Read the v2 schema file
schemaFile := filepath.Join("testdata", "schema.sql")
schemaSQL, err := os.ReadFile(schemaFile)
if err != nil {
t.Fatalf("failed to read schema file: %v", err)
}
// Open database connection
conn, err := sql.Open("sqlite3", dbPath+"?_foreign_keys=on&_journal_mode=WAL&_busy_timeout=5000&_txlock=immediate")
if err != nil {
t.Fatalf("failed to open database: %v", err)
}
// Execute the v2 schema
_, err = conn.Exec(string(schemaSQL))
if err != nil {
conn.Close()
t.Fatalf("failed to execute v1 schema: %v", err)
}
return &database{conn: conn}
}

View File

@@ -1,128 +0,0 @@
//go:build windows || darwin
package store
import (
"crypto/sha256"
"encoding/hex"
"fmt"
"os"
"path/filepath"
"strings"
)
type Image struct {
Filename string `json:"filename"`
Path string `json:"path"`
Size int64 `json:"size,omitempty"`
MimeType string `json:"mime_type,omitempty"`
}
// Bytes loads image data from disk for a given ImageData reference
func (i *Image) Bytes() ([]byte, error) {
return ImgBytes(i.Path)
}
// ImgBytes reads image data from the specified file path
func ImgBytes(path string) ([]byte, error) {
if path == "" {
return nil, fmt.Errorf("empty image path")
}
data, err := os.ReadFile(path)
if err != nil {
return nil, fmt.Errorf("read image file %s: %w", path, err)
}
return data, nil
}
// ImgDir returns the directory path for storing images for a specific chat
func (s *Store) ImgDir() string {
dbPath := s.DBPath
if dbPath == "" {
dbPath = defaultDBPath
}
storeDir := filepath.Dir(dbPath)
return filepath.Join(storeDir, "cache", "images")
}
// ImgToFile saves image data to disk and returns ImageData reference
func (s *Store) ImgToFile(chatID string, imageBytes []byte, filename, mimeType string) (Image, error) {
baseImageDir := s.ImgDir()
if err := os.MkdirAll(baseImageDir, 0o755); err != nil {
return Image{}, fmt.Errorf("create base image directory: %w", err)
}
// Root prevents path traversal issues
root, err := os.OpenRoot(baseImageDir)
if err != nil {
return Image{}, fmt.Errorf("open image root directory: %w", err)
}
defer root.Close()
// Create chat-specific subdirectory within the root
chatDir := sanitize(chatID)
if err := root.Mkdir(chatDir, 0o755); err != nil && !os.IsExist(err) {
return Image{}, fmt.Errorf("create chat directory: %w", err)
}
// Generate a unique filename to avoid conflicts
// Use hash of content + original filename for uniqueness
hash := sha256.Sum256(imageBytes)
hashStr := hex.EncodeToString(hash[:])[:16] // Use first 16 chars of hash
// Extract file extension from original filename or mime type
ext := filepath.Ext(filename)
if ext == "" {
switch mimeType {
case "image/jpeg":
ext = ".jpg"
case "image/png":
ext = ".png"
case "image/webp":
ext = ".webp"
default:
ext = ".img"
}
}
// Create unique filename: hash + original name + extension
baseFilename := sanitize(strings.TrimSuffix(filename, ext))
uniqueFilename := fmt.Sprintf("%s_%s%s", hashStr, baseFilename, ext)
relativePath := filepath.Join(chatDir, uniqueFilename)
file, err := root.Create(relativePath)
if err != nil {
return Image{}, fmt.Errorf("create image file: %w", err)
}
defer file.Close()
if _, err := file.Write(imageBytes); err != nil {
return Image{}, fmt.Errorf("write image data: %w", err)
}
return Image{
Filename: uniqueFilename,
Path: filepath.Join(baseImageDir, relativePath),
Size: int64(len(imageBytes)),
MimeType: mimeType,
}, nil
}
// sanitize removes unsafe characters from filenames
func sanitize(filename string) string {
// Convert to safe characters only
safe := strings.Map(func(r rune) rune {
if (r >= 'a' && r <= 'z') || (r >= 'A' && r <= 'Z') || (r >= '0' && r <= '9') || r == '-' {
return r
}
return '_'
}, filename)
// Clean up and validate
safe = strings.Trim(safe, "_")
if safe == "" {
return "image"
}
return safe
}
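// Example: sanitize("my photo (1)") returns "my_photo__1"; an input that
// maps entirely to underscores is trimmed away and falls back to "image".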

View File

@@ -1,231 +0,0 @@
//go:build windows || darwin

package store

import (
"database/sql"
"encoding/json"
"os"
"path/filepath"
"testing"
)
func TestConfigMigration(t *testing.T) {
tmpDir := t.TempDir()
// Create a legacy config.json
legacyConfig := legacyData{
ID: "test-device-id-12345",
FirstTimeRun: true, // in the old system, true meant the first run had completed
}
configData, err := json.MarshalIndent(legacyConfig, "", " ")
if err != nil {
t.Fatal(err)
}
configPath := filepath.Join(tmpDir, "config.json")
if err := os.WriteFile(configPath, configData, 0o644); err != nil {
t.Fatal(err)
}
// Override the legacy config path for testing
oldLegacyConfigPath := legacyConfigPath
legacyConfigPath = configPath
defer func() { legacyConfigPath = oldLegacyConfigPath }()
// Create store with database in same directory
s := Store{DBPath: filepath.Join(tmpDir, "db.sqlite")}
defer s.Close()
// First access should trigger migration
id, err := s.ID()
if err != nil {
t.Fatalf("failed to get ID: %v", err)
}
if id != "test-device-id-12345" {
t.Errorf("expected migrated ID 'test-device-id-12345', got '%s'", id)
}
// Check HasCompletedFirstRun
hasCompleted, err := s.HasCompletedFirstRun()
if err != nil {
t.Fatalf("failed to get has completed first run: %v", err)
}
if !hasCompleted {
t.Error("expected has completed first run to be true after migration")
}
// Verify migration is marked as complete
migrated, err := s.db.isConfigMigrated()
if err != nil {
t.Fatalf("failed to check migration status: %v", err)
}
if !migrated {
t.Error("expected config to be marked as migrated")
}
// Create a new store instance to verify migration doesn't run again
s2 := Store{DBPath: filepath.Join(tmpDir, "db.sqlite")}
defer s2.Close()
// Delete the config file to ensure we're not reading from it
os.Remove(configPath)
// Verify data is still there
id2, err := s2.ID()
if err != nil {
t.Fatalf("failed to get ID from second store: %v", err)
}
if id2 != "test-device-id-12345" {
t.Errorf("expected persisted ID 'test-device-id-12345', got '%s'", id2)
}
}
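For orientation, here is a plausible shape for the legacyData struct these tests marshal; the field names match the composite literal above, but the JSON tags are assumptions rather than something this diff confirms.

// Hypothetical reconstruction; the real definition lives elsewhere in the store package.
type legacyData struct {
	ID           string `json:"id"`
	FirstTimeRun bool   `json:"first_time_run"`
}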
func TestNoConfigToMigrate(t *testing.T) {
tmpDir := t.TempDir()
// Override the legacy config path for testing
oldLegacyConfigPath := legacyConfigPath
legacyConfigPath = filepath.Join(tmpDir, "config.json")
defer func() { legacyConfigPath = oldLegacyConfigPath }()
// Create store without any config.json
s := Store{DBPath: filepath.Join(tmpDir, "db.sqlite")}
defer s.Close()
// Should generate a new ID
id, err := s.ID()
if err != nil {
t.Fatalf("failed to get ID: %v", err)
}
if id == "" {
t.Error("expected auto-generated ID, got empty string")
}
// HasCompletedFirstRun should be false (default)
hasCompleted, err := s.HasCompletedFirstRun()
if err != nil {
t.Fatalf("failed to get has completed first run: %v", err)
}
if hasCompleted {
t.Error("expected has completed first run to be false by default")
}
// Migration should still be marked as complete
migrated, err := s.db.isConfigMigrated()
if err != nil {
t.Fatalf("failed to check migration status: %v", err)
}
if !migrated {
t.Error("expected config to be marked as migrated even with no config.json")
}
}
const (
v1Schema = `
CREATE TABLE IF NOT EXISTS settings (
id INTEGER PRIMARY KEY CHECK (id = 1),
device_id TEXT NOT NULL DEFAULT '',
has_completed_first_run BOOLEAN NOT NULL DEFAULT 0,
expose BOOLEAN NOT NULL DEFAULT 0,
browser BOOLEAN NOT NULL DEFAULT 0,
models TEXT NOT NULL DEFAULT '',
remote TEXT NOT NULL DEFAULT '',
agent BOOLEAN NOT NULL DEFAULT 0,
tools BOOLEAN NOT NULL DEFAULT 0,
working_dir TEXT NOT NULL DEFAULT '',
window_width INTEGER NOT NULL DEFAULT 0,
window_height INTEGER NOT NULL DEFAULT 0,
config_migrated BOOLEAN NOT NULL DEFAULT 0,
schema_version INTEGER NOT NULL DEFAULT 1
);
-- Insert default settings row if it doesn't exist
INSERT OR IGNORE INTO settings (id) VALUES (1);
CREATE TABLE IF NOT EXISTS chats (
id TEXT PRIMARY KEY,
title TEXT NOT NULL DEFAULT '',
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE IF NOT EXISTS messages (
id INTEGER PRIMARY KEY AUTOINCREMENT,
chat_id TEXT NOT NULL,
role TEXT NOT NULL,
content TEXT NOT NULL DEFAULT '',
thinking TEXT NOT NULL DEFAULT '',
stream BOOLEAN NOT NULL DEFAULT 0,
model_name TEXT,
model_cloud BOOLEAN,
model_ollama_host BOOLEAN,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
thinking_time_start TIMESTAMP,
thinking_time_end TIMESTAMP,
FOREIGN KEY (chat_id) REFERENCES chats(id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_messages_chat_id ON messages(chat_id);
CREATE TABLE IF NOT EXISTS tool_calls (
id INTEGER PRIMARY KEY AUTOINCREMENT,
message_id INTEGER NOT NULL,
type TEXT NOT NULL,
function_name TEXT NOT NULL,
function_arguments TEXT NOT NULL,
function_result TEXT,
FOREIGN KEY (message_id) REFERENCES messages(id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_tool_calls_message_id ON tool_calls(message_id);
`
)
func TestMigrationFromEpoch(t *testing.T) {
tmpDir := t.TempDir()
s := Store{DBPath: filepath.Join(tmpDir, "db.sqlite")}
defer s.Close()
// Open database connection
conn, err := sql.Open("sqlite3", s.DBPath+"?_foreign_keys=on&_journal_mode=WAL")
if err != nil {
t.Fatal(err)
}
// Test the connection
if err := conn.Ping(); err != nil {
conn.Close()
t.Fatal(err)
}
s.db = &database{conn: conn}
t.Logf("DB created: %s", s.DBPath)
_, err = s.db.conn.Exec(v1Schema)
if err != nil {
t.Fatal(err)
}
version, err := s.db.getSchemaVersion()
if err != nil {
t.Fatalf("failed to get schema version: %v", err)
}
if version != 1 {
t.Fatalf("expected: %d\n got: %d", 1, version)
}
t.Logf("v1 schema created")
if err := s.db.migrate(); err != nil {
t.Fatal(err)
}
t.Logf("migrations completed")
version, err = s.db.getSchemaVersion()
if err != nil {
t.Fatalf("failed to get schema version: %v", err)
}
if version != currentSchemaVersion {
t.Fatalf("expected: %d\n got: %d", currentSchemaVersion, version)
}
}
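The test drives migrate() from the v1 schema up to currentSchemaVersion. Below is a hedged sketch of the versioned-step pattern such a migrate typically follows; migrationSteps and the step bodies are assumptions, not the project's actual code.

// migrateSketch walks one schema version at a time until the schema is current.
func (d *database) migrateSketch(migrationSteps map[int]string) error {
	version, err := d.getSchemaVersion()
	if err != nil {
		return err
	}
	for ; version < currentSchemaVersion; version++ {
		// Apply the step that upgrades `version` to `version+1` ...
		if _, err := d.conn.Exec(migrationSteps[version]); err != nil {
			return err
		}
		// ... then record the new version so a partial sequence can resume.
		if _, err := d.conn.Exec("UPDATE settings SET schema_version = ?", version+1); err != nil {
			return err
		}
	}
	return nil
}

In practice each step and its version bump would run inside a single transaction; that detail is omitted here for brevity.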

View File

@@ -1,61 +0,0 @@
-- This is the version 2 schema for the app database, the first schema released to users.
-- Do not modify this file. It is used to verify that the database schema stays consistent across schema migrations.
CREATE TABLE IF NOT EXISTS settings (
id INTEGER PRIMARY KEY CHECK (id = 1),
device_id TEXT NOT NULL DEFAULT '',
has_completed_first_run BOOLEAN NOT NULL DEFAULT 0,
expose BOOLEAN NOT NULL DEFAULT 0,
survey BOOLEAN NOT NULL DEFAULT TRUE,
browser BOOLEAN NOT NULL DEFAULT 0,
models TEXT NOT NULL DEFAULT '',
remote TEXT NOT NULL DEFAULT '',
agent BOOLEAN NOT NULL DEFAULT 0,
tools BOOLEAN NOT NULL DEFAULT 0,
working_dir TEXT NOT NULL DEFAULT '',
context_length INTEGER NOT NULL DEFAULT 4096,
window_width INTEGER NOT NULL DEFAULT 0,
window_height INTEGER NOT NULL DEFAULT 0,
config_migrated BOOLEAN NOT NULL DEFAULT 0,
schema_version INTEGER NOT NULL DEFAULT 2
);
-- Insert default settings row if it doesn't exist
INSERT OR IGNORE INTO settings (id) VALUES (1);
CREATE TABLE IF NOT EXISTS chats (
id TEXT PRIMARY KEY,
title TEXT NOT NULL DEFAULT '',
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE IF NOT EXISTS messages (
id INTEGER PRIMARY KEY AUTOINCREMENT,
chat_id TEXT NOT NULL,
role TEXT NOT NULL,
content TEXT NOT NULL DEFAULT '',
thinking TEXT NOT NULL DEFAULT '',
stream BOOLEAN NOT NULL DEFAULT 0,
model_name TEXT,
model_cloud BOOLEAN,
model_ollama_host BOOLEAN,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
thinking_time_start TIMESTAMP,
thinking_time_end TIMESTAMP,
FOREIGN KEY (chat_id) REFERENCES chats(id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_messages_chat_id ON messages(chat_id);
CREATE TABLE IF NOT EXISTS tool_calls (
id INTEGER PRIMARY KEY AUTOINCREMENT,
message_id INTEGER NOT NULL,
type TEXT NOT NULL,
function_name TEXT NOT NULL,
function_arguments TEXT NOT NULL,
function_result TEXT,
FOREIGN KEY (message_id) REFERENCES messages(id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_tool_calls_message_id ON tool_calls(message_id);
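Set against the v1 schema embedded in the test file above, the visible v2 changes are two new settings columns (survey and context_length) and the schema_version default moving from 1 to 2. A v1-to-v2 step consistent with both files might look like the Go constant below; it is inferred by diffing the two schemas, not taken from the project.

// Hypothetical v1 -> v2 step, inferred by comparing the two schemas.
const migrateV1ToV2 = `
ALTER TABLE settings ADD COLUMN survey BOOLEAN NOT NULL DEFAULT TRUE;
ALTER TABLE settings ADD COLUMN context_length INTEGER NOT NULL DEFAULT 4096;
UPDATE settings SET schema_version = 2;
`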

Some files were not shown because too many files have changed in this diff.