Mirror of https://github.com/ollama/ollama.git (synced 2025-12-24 08:10:54 -05:00)

Compare commits: 1 commit (license-la ... modelpath)

| Author | SHA1 | Date |
|---|---|---|
| | db961934dc | |

README.md (108 changed lines)
@@ -1,65 +1,75 @@

<div align="center">
<picture>
<source media="(prefers-color-scheme: dark)" height="200px" srcset="https://github.com/jmorganca/ollama/assets/3325447/318048d2-b2dd-459c-925a-ac8449d5f02c">
<img alt="logo" height="200px" src="https://github.com/jmorganca/ollama/assets/3325447/c7d6e15f-7f4d-4776-b568-c084afa297c2">
</picture>
</div>

# Ollama

Create, run, and share self-contained large language models (LLMs). Ollama bundles a model’s weights, configuration, prompts, and more into self-contained packages that run anywhere.
Run large language models with `llama.cpp`.

> Note: Ollama is in early preview. Please report any issues you find.
> Note: certain models that can be run with Ollama are intended for research and/or non-commercial use only.

## Download
### Features

- [Download](https://ollama.ai/download) for macOS on Apple Silicon (Intel coming soon)
- Download for Windows and Linux (coming soon)
- Build [from source](#building)
- Download and run popular large language models
- Switch between multiple models on the fly
- Hardware acceleration where available (Metal, CUDA)
- Fast inference server written in Go, powered by [llama.cpp](https://github.com/ggerganov/llama.cpp)
- REST API to use with your application (python, typescript SDKs coming soon)

## Examples
## Install

### Quickstart

- [Download](https://ollama.ai/download) for macOS with Apple Silicon (Intel coming soon)
- Download for Windows (coming soon)

You can also build the [binary from source](#building).

## Quickstart

Run a fast and simple model.

```
ollama run llama2
>>> hi
Hello! How can I help you today?
ollama run orca
```
### Creating a custom model
## Example models

Create a `Modelfile`:
### 💬 Chat

Have a conversation.

```
FROM llama2
PROMPT """
You are Mario from Super Mario Bros. Answer as Mario, the assistant, only.

User: {{ .Prompt }}
Mario:
"""
ollama run vicuna "Why is the sky blue?"
```

Next, create and run the model:
### 🗺️ Instructions

Get a helping hand.

```
ollama create mario -f ./Modelfile
ollama run mario
>>> hi
Hello! It's your friend Mario.
ollama run orca "Write an email to my boss."
```
## Model library
### 🔎 Ask questions about documents

Ollama includes a library of open-source, pre-trained models. More models are coming soon.
Send the contents of a document and ask questions about it.

| Model | Parameters | Size | Download |
| ----------- | ---------- | ----- | ------------------------- |
| Llama2 | 7B | 3.8GB | `ollama pull llama2` |
| Orca Mini | 3B | 1.9GB | `ollama pull orca` |
| Vicuna | 7B | 3.8GB | `ollama pull vicuna` |
| Nous-Hermes | 13B | 7.3GB | `ollama pull nous-hermes` |

```
ollama run nous-hermes "$(cat input.txt)", please summarize this story
```

### 📖 Storytelling

Venture into the unknown.

```
ollama run nous-hermes "Once upon a time"
```

## Advanced usage

### Run a local model

```
ollama run ~/Downloads/vicuna-7b-v1.3.ggmlv3.q4_1.bin
```

## Building
@@ -76,5 +86,23 @@ To run it start the server:

Finally, run a model!

```
./ollama run llama2
./ollama run ~/Downloads/vicuna-7b-v1.3.ggmlv3.q4_1.bin
```

## API Reference

### `POST /api/pull`

Download a model.

```
curl -X POST http://localhost:11434/api/pull -d '{"model": "orca"}'
```

### `POST /api/generate`

Complete a prompt.

```
curl -X POST http://localhost:11434/api/generate -d '{"model": "orca", "prompt": "hello!"}'
```
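Both endpoints stream their results back as newline-delimited JSON objects rather than a single response body. As a rough illustration (not part of this commit), the Go sketch below calls `/api/generate` and prints the streamed completion; the `response` field name is taken from the Python example later on this page, and the model and prompt values mirror the curl command above.

```go
package main

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Same request body as the curl example above.
	payload, err := json.Marshal(map[string]string{
		"model":  "orca",
		"prompt": "hello!",
	})
	if err != nil {
		panic(err)
	}

	resp, err := http.Post("http://localhost:11434/api/generate", "application/json", bytes.NewReader(payload))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// The server streams one JSON object per line; print each partial "response" chunk.
	dec := json.NewDecoder(resp.Body)
	for {
		var chunk struct {
			Response string `json:"response"`
		}
		if err := dec.Decode(&chunk); errors.Is(err, io.EOF) {
			break
		} else if err != nil {
			panic(err)
		}
		fmt.Print(chunk.Response)
	}
	fmt.Println()
}
```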
@@ -6,31 +6,26 @@ import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
)
|
||||
|
||||
type Client struct {
|
||||
base url.URL
|
||||
HTTP http.Client
|
||||
Headers http.Header
|
||||
type StatusError struct {
|
||||
StatusCode int
|
||||
Status string
|
||||
Message string
|
||||
}
|
||||
|
||||
func checkError(resp *http.Response, body []byte) error {
|
||||
if resp.StatusCode >= 200 && resp.StatusCode < 400 {
|
||||
return nil
|
||||
func (e StatusError) Error() string {
|
||||
if e.Message != "" {
|
||||
return fmt.Sprintf("%s: %s", e.Status, e.Message)
|
||||
}
|
||||
|
||||
apiError := StatusError{StatusCode: resp.StatusCode}
|
||||
return e.Status
|
||||
}
|
||||
|
||||
err := json.Unmarshal(body, &apiError)
|
||||
if err != nil {
|
||||
// Use the full body as the message if we fail to decode a response.
|
||||
apiError.Message = string(body)
|
||||
}
|
||||
|
||||
return apiError
|
||||
type Client struct {
|
||||
base url.URL
|
||||
}
|
||||
|
||||
func NewClient(hosts ...string) *Client {
|
||||
@@ -41,60 +36,9 @@ func NewClient(hosts ...string) *Client {
|
||||
|
||||
return &Client{
|
||||
base: url.URL{Scheme: "http", Host: host},
|
||||
HTTP: http.Client{},
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Client) do(ctx context.Context, method, path string, reqData, respData any) error {
|
||||
var reqBody io.Reader
|
||||
var data []byte
|
||||
var err error
|
||||
if reqData != nil {
|
||||
data, err = json.Marshal(reqData)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
reqBody = bytes.NewReader(data)
|
||||
}
|
||||
|
||||
url := c.base.JoinPath(path).String()
|
||||
|
||||
req, err := http.NewRequestWithContext(ctx, method, url, reqBody)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
req.Header.Set("Accept", "application/json")
|
||||
|
||||
for k, v := range c.Headers {
|
||||
req.Header[k] = v
|
||||
}
|
||||
|
||||
respObj, err := c.HTTP.Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer respObj.Body.Close()
|
||||
|
||||
respBody, err := io.ReadAll(respObj.Body)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := checkError(respObj, respBody); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if len(respBody) > 0 && respData != nil {
|
||||
if err := json.Unmarshal(respBody, respData); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
|
||||
}
|
||||
|
||||
func (c *Client) stream(ctx context.Context, method, path string, data any, fn func([]byte) error) error {
|
||||
var buf *bytes.Buffer
|
||||
if data != nil {
|
||||
@@ -160,11 +104,11 @@ func (c *Client) Generate(ctx context.Context, req *GenerateRequest, fn Generate
|
||||
})
|
||||
}
|
||||
|
||||
type PullProgressFunc func(ProgressResponse) error
|
||||
type PullProgressFunc func(PullProgress) error
|
||||
|
||||
func (c *Client) Pull(ctx context.Context, req *PullRequest, fn PullProgressFunc) error {
|
||||
return c.stream(ctx, http.MethodPost, "/api/pull", req, func(bts []byte) error {
|
||||
var resp ProgressResponse
|
||||
var resp PullProgress
|
||||
if err := json.Unmarshal(bts, &resp); err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -173,11 +117,11 @@ func (c *Client) Pull(ctx context.Context, req *PullRequest, fn PullProgressFunc
|
||||
})
|
||||
}
|
||||
|
||||
type PushProgressFunc func(ProgressResponse) error
|
||||
type PushProgressFunc func(PushProgress) error
|
||||
|
||||
func (c *Client) Push(ctx context.Context, req *PushRequest, fn PushProgressFunc) error {
|
||||
return c.stream(ctx, http.MethodPost, "/api/push", req, func(bts []byte) error {
|
||||
var resp ProgressResponse
|
||||
var resp PushProgress
|
||||
if err := json.Unmarshal(bts, &resp); err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -198,11 +142,3 @@ func (c *Client) Create(ctx context.Context, req *CreateRequest, fn CreateProgre
|
||||
return fn(resp)
|
||||
})
|
||||
}
|
||||
|
||||
func (c *Client) List(ctx context.Context) (*ListResponse, error) {
|
||||
var lr ListResponse
|
||||
if err := c.do(ctx, http.MethodGet, "/api/tags", nil, &lr); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &lr, nil
|
||||
}
|
||||
|
||||
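For orientation (not part of this diff): a minimal sketch of how the client above might be driven from another Go program. It assumes the module path `github.com/jmorganca/ollama` from go.mod and reuses the `PullRequest` and `PullProgress` shapes shown in `api/types.go` and `cmd/cmd.go` elsewhere on this page; treat it as an illustration of the callback-per-chunk streaming pattern rather than a documented API.

```go
package main

import (
	"context"
	"fmt"

	"github.com/jmorganca/ollama/api"
)

func main() {
	// NewClient falls back to the default local server when no host is given.
	client := api.NewClient()

	req := api.PullRequest{Name: "orca"}

	// Each streamed progress object is decoded into a PullProgress and passed here.
	fn := func(resp api.PullProgress) error {
		if resp.Total > 0 {
			fmt.Printf("%s %d/%d (%.0f%%)\n", resp.Status, resp.Completed, resp.Total, resp.Percent*100)
		} else {
			fmt.Println(resp.Status)
		}
		return nil
	}

	if err := client.Pull(context.Background(), &req, fn); err != nil {
		panic(err)
	}
}
```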
api/types.go (30 changed lines)
@@ -7,19 +7,6 @@ import (
|
||||
"time"
|
||||
)
|
||||
|
||||
type StatusError struct {
|
||||
StatusCode int
|
||||
Status string
|
||||
Message string
|
||||
}
|
||||
|
||||
func (e StatusError) Error() string {
|
||||
if e.Message != "" {
|
||||
return fmt.Sprintf("%s: %s", e.Status, e.Message)
|
||||
}
|
||||
return e.Status
|
||||
}
|
||||
|
||||
type GenerateRequest struct {
|
||||
Model string `json:"model"`
|
||||
Prompt string `json:"prompt"`
|
||||
@@ -43,11 +30,12 @@ type PullRequest struct {
|
||||
Password string `json:"password"`
|
||||
}
|
||||
|
||||
type ProgressResponse struct {
|
||||
type PullProgress struct {
|
||||
Status string `json:"status"`
|
||||
Digest string `json:"digest,omitempty"`
|
||||
Total int `json:"total,omitempty"`
|
||||
Completed int `json:"completed,omitempty"`
|
||||
Percent float64 `json:"percent,omitempty"`
|
||||
}
|
||||
|
||||
type PushRequest struct {
|
||||
@@ -56,14 +44,12 @@ type PushRequest struct {
|
||||
Password string `json:"password"`
|
||||
}
|
||||
|
||||
type ListResponse struct {
|
||||
Models []ListResponseModel `json:"models"`
|
||||
}
|
||||
|
||||
type ListResponseModel struct {
|
||||
Name string `json:"name"`
|
||||
ModifiedAt time.Time `json:"modified_at"`
|
||||
Size int `json:"size"`
|
||||
type PushProgress struct {
|
||||
Status string `json:"status"`
|
||||
Digest string `json:"digest,omitempty"`
|
||||
Total int `json:"total,omitempty"`
|
||||
Completed int `json:"completed,omitempty"`
|
||||
Percent float64 `json:"percent,omitempty"`
|
||||
}
|
||||
|
||||
type GenerateResponse struct {
|
||||
|
||||
Binary file not shown (before: 403 B, after: 442 B).
Binary file not shown (before: 741 B, after: 889 B).
@@ -1,4 +1,4 @@
|
||||
import type { ForgeConfig } from '@electron-forge/shared-types'
|
||||
import type { ForgeConfig, ResolvedForgeConfig, ForgeMakeResult } from '@electron-forge/shared-types'
|
||||
import { MakerSquirrel } from '@electron-forge/maker-squirrel'
|
||||
import { MakerZIP } from '@electron-forge/maker-zip'
|
||||
import { PublisherGithub } from '@electron-forge/publisher-github'
|
||||
|
||||
@@ -19,7 +19,7 @@ export default function () {
|
||||
const [step, setStep] = useState<Step>(Step.WELCOME)
|
||||
const [commandCopied, setCommandCopied] = useState<boolean>(false)
|
||||
|
||||
const command = 'ollama run llama2'
|
||||
const command = 'ollama run orca'
|
||||
|
||||
return (
|
||||
<div className='drag'>
|
||||
@@ -77,11 +77,7 @@ export default function () {
|
||||
{command}
|
||||
</pre>
|
||||
<button
|
||||
className={`no-drag absolute right-[5px] px-2 py-2 ${
|
||||
commandCopied
|
||||
? 'text-gray-900 opacity-100 hover:cursor-auto'
|
||||
: 'text-gray-200 opacity-50 hover:cursor-pointer'
|
||||
} hover:font-bold hover:text-gray-900 group-hover:opacity-100`}
|
||||
className={`no-drag absolute right-[5px] px-2 py-2 ${commandCopied ? 'text-gray-900 opacity-100 hover:cursor-auto' : 'text-gray-200 opacity-50 hover:cursor-pointer'} hover:text-gray-900 hover:font-bold group-hover:opacity-100`}
|
||||
onClick={() => {
|
||||
copy(command)
|
||||
setCommandCopied(true)
|
||||
@@ -89,15 +85,13 @@ export default function () {
|
||||
}}
|
||||
>
|
||||
{commandCopied ? (
|
||||
<CheckIcon className='h-4 w-4 font-bold text-gray-500' />
|
||||
<CheckIcon className='h-4 w-4 text-gray-500 font-bold' />
|
||||
) : (
|
||||
<DocumentDuplicateIcon className='h-4 w-4 text-gray-500' />
|
||||
)}
|
||||
</button>
|
||||
</div>
|
||||
<p className='mx-auto my-4 w-[70%] text-xs text-gray-400'>
|
||||
Run this command in your favorite terminal.
|
||||
</p>
|
||||
<p className='mx-auto my-4 w-[70%] text-xs text-gray-400'>Run this command in your favorite terminal.</p>
|
||||
</div>
|
||||
<button
|
||||
onClick={() => {
|
||||
|
||||
@@ -162,6 +162,7 @@ app.on('ready', () => {
|
||||
|
||||
// This is the first run or the CLI is no longer installed
|
||||
app.setLoginItemSettings({ openAtLogin: true })
|
||||
|
||||
firstRunWindow()
|
||||
})
|
||||
|
||||
|
||||
cmd/cmd.go (110 changed lines)
@@ -13,37 +13,30 @@ import (
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/dustin/go-humanize"
|
||||
"github.com/olekukonko/tablewriter"
|
||||
"github.com/schollz/progressbar/v3"
|
||||
"github.com/spf13/cobra"
|
||||
"golang.org/x/term"
|
||||
|
||||
"github.com/jmorganca/ollama/api"
|
||||
"github.com/jmorganca/ollama/format"
|
||||
"github.com/jmorganca/ollama/server"
|
||||
)
|
||||
|
||||
func create(cmd *cobra.Command, args []string) error {
|
||||
filename, _ := cmd.Flags().GetString("file")
|
||||
filename, err := filepath.Abs(filename)
|
||||
func cacheDir() string {
|
||||
home, err := os.UserHomeDir()
|
||||
if err != nil {
|
||||
return err
|
||||
panic(err)
|
||||
}
|
||||
|
||||
client := api.NewClient()
|
||||
return filepath.Join(home, ".ollama")
|
||||
}
|
||||
|
||||
var spinner *Spinner
|
||||
func create(cmd *cobra.Command, args []string) error {
|
||||
filename, _ := cmd.Flags().GetString("file")
|
||||
client := api.NewClient()
|
||||
|
||||
request := api.CreateRequest{Name: args[0], Path: filename}
|
||||
fn := func(resp api.CreateProgress) error {
|
||||
if spinner != nil {
|
||||
spinner.Stop()
|
||||
}
|
||||
|
||||
spinner = NewSpinner(resp.Status)
|
||||
go spinner.Spin(100 * time.Millisecond)
|
||||
|
||||
fmt.Println(resp.Status)
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -51,10 +44,6 @@ func create(cmd *cobra.Command, args []string) error {
|
||||
return err
|
||||
}
|
||||
|
||||
if spinner != nil {
|
||||
spinner.Stop()
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -89,7 +78,7 @@ func push(cmd *cobra.Command, args []string) error {
|
||||
client := api.NewClient()
|
||||
|
||||
request := api.PushRequest{Name: args[0]}
|
||||
fn := func(resp api.ProgressResponse) error {
|
||||
fn := func(resp api.PushProgress) error {
|
||||
fmt.Println(resp.Status)
|
||||
return nil
|
||||
}
|
||||
@@ -100,34 +89,6 @@ func push(cmd *cobra.Command, args []string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func list(cmd *cobra.Command, args []string) error {
|
||||
client := api.NewClient()
|
||||
|
||||
models, err := client.List(context.Background())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var data [][]string
|
||||
|
||||
for _, m := range models.Models {
|
||||
data = append(data, []string{m.Name, humanize.Bytes(uint64(m.Size)), format.HumanTime(m.ModifiedAt, "Never")})
|
||||
}
|
||||
|
||||
table := tablewriter.NewWriter(os.Stdout)
|
||||
table.SetHeader([]string{"NAME", "SIZE", "MODIFIED"})
|
||||
table.SetHeaderAlignment(tablewriter.ALIGN_LEFT)
|
||||
table.SetAlignment(tablewriter.ALIGN_LEFT)
|
||||
table.SetHeaderLine(false)
|
||||
table.SetBorder(false)
|
||||
table.SetNoWhiteSpace(true)
|
||||
table.SetTablePadding("\t")
|
||||
table.AppendBulk(data)
|
||||
table.Render()
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func RunPull(cmd *cobra.Command, args []string) error {
|
||||
return pull(args[0])
|
||||
}
|
||||
@@ -135,23 +96,25 @@ func RunPull(cmd *cobra.Command, args []string) error {
|
||||
func pull(model string) error {
|
||||
client := api.NewClient()
|
||||
|
||||
var currentDigest string
|
||||
var bar *progressbar.ProgressBar
|
||||
|
||||
currentLayer := ""
|
||||
request := api.PullRequest{Name: model}
|
||||
fn := func(resp api.ProgressResponse) error {
|
||||
if resp.Digest != currentDigest && resp.Digest != "" {
|
||||
currentDigest = resp.Digest
|
||||
fn := func(resp api.PullProgress) error {
|
||||
if resp.Digest != currentLayer && resp.Digest != "" {
|
||||
if currentLayer != "" {
|
||||
fmt.Println()
|
||||
}
|
||||
currentLayer = resp.Digest
|
||||
layerStr := resp.Digest[7:23] + "..."
|
||||
bar = progressbar.DefaultBytes(
|
||||
int64(resp.Total),
|
||||
fmt.Sprintf("pulling %s...", resp.Digest[7:19]),
|
||||
"pulling "+layerStr,
|
||||
)
|
||||
|
||||
bar.Set(resp.Completed)
|
||||
} else if resp.Digest == currentDigest && resp.Digest != "" {
|
||||
} else if resp.Digest == currentLayer && resp.Digest != "" {
|
||||
bar.Set(resp.Completed)
|
||||
} else {
|
||||
currentDigest = ""
|
||||
currentLayer = ""
|
||||
fmt.Println(resp.Status)
|
||||
}
|
||||
return nil
|
||||
@@ -182,8 +145,24 @@ func generate(cmd *cobra.Command, model, prompt string) error {
|
||||
if len(strings.TrimSpace(prompt)) > 0 {
|
||||
client := api.NewClient()
|
||||
|
||||
spinner := NewSpinner("")
|
||||
go spinner.Spin(60 * time.Millisecond)
|
||||
spinner := progressbar.NewOptions(-1,
|
||||
progressbar.OptionSetWriter(os.Stderr),
|
||||
progressbar.OptionThrottle(60*time.Millisecond),
|
||||
progressbar.OptionSpinnerType(14),
|
||||
progressbar.OptionSetRenderBlankState(true),
|
||||
progressbar.OptionSetElapsedTime(false),
|
||||
progressbar.OptionClearOnFinish(),
|
||||
)
|
||||
|
||||
go func() {
|
||||
for range time.Tick(60 * time.Millisecond) {
|
||||
if spinner.IsFinished() {
|
||||
break
|
||||
}
|
||||
|
||||
spinner.Add(1)
|
||||
}
|
||||
}()
|
||||
|
||||
var latest api.GenerateResponse
|
||||
|
||||
@@ -282,6 +261,10 @@ func NewCLI() *cobra.Command {
|
||||
CompletionOptions: cobra.CompletionOptions{
|
||||
DisableDefaultCmd: true,
|
||||
},
|
||||
PersistentPreRunE: func(_ *cobra.Command, args []string) error {
|
||||
// create the models directory and its parent
|
||||
return os.MkdirAll(filepath.Join(cacheDir(), "models"), 0o700)
|
||||
},
|
||||
}
|
||||
|
||||
cobra.EnableCommandSorting = false
|
||||
@@ -325,19 +308,12 @@ func NewCLI() *cobra.Command {
|
||||
RunE: push,
|
||||
}
|
||||
|
||||
listCmd := &cobra.Command{
|
||||
Use: "list",
|
||||
Short: "List models",
|
||||
RunE: list,
|
||||
}
|
||||
|
||||
rootCmd.AddCommand(
|
||||
serveCmd,
|
||||
createCmd,
|
||||
runCmd,
|
||||
pullCmd,
|
||||
pushCmd,
|
||||
listCmd,
|
||||
)
|
||||
|
||||
return rootCmd
|
||||
|
||||
@@ -1,44 +0,0 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"github.com/schollz/progressbar/v3"
|
||||
)
|
||||
|
||||
type Spinner struct {
|
||||
description string
|
||||
*progressbar.ProgressBar
|
||||
}
|
||||
|
||||
func NewSpinner(description string) *Spinner {
|
||||
return &Spinner{
|
||||
description: description,
|
||||
ProgressBar: progressbar.NewOptions(-1,
|
||||
progressbar.OptionSetWriter(os.Stderr),
|
||||
progressbar.OptionThrottle(60*time.Millisecond),
|
||||
progressbar.OptionSpinnerType(14),
|
||||
progressbar.OptionSetRenderBlankState(true),
|
||||
progressbar.OptionSetElapsedTime(false),
|
||||
progressbar.OptionClearOnFinish(),
|
||||
progressbar.OptionSetDescription(description),
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Spinner) Spin(tick time.Duration) {
|
||||
for range time.Tick(tick) {
|
||||
if s.IsFinished() {
|
||||
break
|
||||
}
|
||||
|
||||
s.Add(1)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Spinner) Stop() {
|
||||
s.Finish()
|
||||
fmt.Println(s.description)
|
||||
}
|
||||
@@ -3,13 +3,13 @@

Install required tools:

```
brew install go
brew install cmake go node
```

Then build ollama:
Then run `make`:

```
go build .
make
```

Now you can run `ollama`:
@@ -1,80 +0,0 @@

# Ollama Model File Reference

Ollama can build models automatically by reading the instructions from a Modelfile. A Modelfile is a text document that represents the complete configuration of the model. You can see that a Modelfile is very similar to a Dockerfile.

## Format

Here is the format of the Modelfile:

```modelfile
# comment
INSTRUCTION arguments
```

Nothing in the file is case-sensitive. However, the convention is for instructions to be uppercase to make them easier to distinguish from the arguments.

A Modelfile can include instructions in any order, but the convention is to start the Modelfile with the FROM instruction.

Although the example above shows a comment starting with a hash character, any instruction that is not recognized is treated as a comment.

## FROM

```modelfile
FROM <image>[:<tag>]
```

This defines the base model to be used. An image can be a known image on the Ollama Hub, or a fully-qualified path to a model file on your system.

## PARAMETER

The PARAMETER instruction defines a parameter that can be set when the model is run.

```modelfile
PARAMETER <parameter> <parametervalue>
```

### Valid Parameters and Values

| Parameter        | Description                                                                                  | Value Type | Value Range |
| ---------------- | -------------------------------------------------------------------------------------------- | ---------- | ----------- |
| NumCtx           |                                                                                              | int        |             |
| NumGPU           |                                                                                              | int        |             |
| MainGPU          |                                                                                              | int        |             |
| LowVRAM          |                                                                                              | bool       |             |
| F16KV            |                                                                                              | bool       |             |
| LogitsAll        |                                                                                              | bool       |             |
| VocabOnly        |                                                                                              | bool       |             |
| UseMMap          |                                                                                              | bool       |             |
| EmbeddingOnly    |                                                                                              | bool       |             |
| RepeatLastN      |                                                                                              | int        |             |
| RepeatPenalty    |                                                                                              | float      |             |
| FrequencyPenalty |                                                                                              | float      |             |
| PresencePenalty  |                                                                                              | float      |             |
| temperature      | The temperature of the model. Higher temperatures result in more creativity in the response | float      | 0 - 1       |
| TopK             |                                                                                              | int        |             |
| TopP             |                                                                                              | float      |             |
| TFSZ             |                                                                                              | float      |             |
| TypicalP         |                                                                                              | float      |             |
| Mirostat         |                                                                                              | int        |             |
| MirostatTau      |                                                                                              | float      |             |
| MirostatEta      |                                                                                              | float      |             |
| NumThread        |                                                                                              | int        |             |

## PROMPT

Prompt is a multiline instruction that defines the prompt to be used when the model is run. Typically there are 3-4 components to a prompt: system, context, user, and response.

```modelfile
PROMPT """
{{- if not .Context }}
### System:
You are a content marketer who needs to come up with a short but succinct tweet. Make sure to include the appropriate hashtags and links. Sometimes when appropriate, describe a meme that can be included as well. All answers should be in the form of a tweet which has a max size of 280 characters. Every instruction will be the topic to create a tweet about.
{{- end }}
### Instruction:
{{ .Prompt }}

### Response:
"""
```
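A side note that is not part of the original reference: the `{{ .Prompt }}` and `{{- if not .Context }}` directives use Go text/template syntax, so a PROMPT template can be previewed outside of Ollama with a few lines of Go. The field names below (`Prompt`, `Context`) simply mirror the names used in the template; how Ollama itself populates them is an assumption here.

```go
package main

import (
	"os"
	"text/template"
)

// Hypothetical stand-in for the values a PROMPT template is rendered with.
type promptVars struct {
	Prompt  string
	Context []int // non-empty once a conversation has prior context
}

const prompt = `{{- if not .Context }}
### System:
You are a helpful assistant.
{{- end }}
### Instruction:
{{ .Prompt }}

### Response:
`

func main() {
	t := template.Must(template.New("prompt").Parse(prompt))
	// First turn: Context is empty, so the system block is rendered.
	if err := t.Execute(os.Stdout, promptVars{Prompt: "Why is the sky blue?"}); err != nil {
		panic(err)
	}
}
```

On a later turn a non-empty `Context` would make `{{- if not .Context }}` evaluate to false, so the system block is emitted only once, which is the behavior the example prompts in this reference rely on.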
@@ -1,15 +0,0 @@

# Examples

This directory contains examples that can be created and run with `ollama`.

To create a model:

```
ollama create example -f <example file>
```

To run a model:

```
ollama run example
```
@@ -1,7 +0,0 @@

FROM llama2
PARAMETER temperature 1
PROMPT """
System: You are Mario from super mario bros, acting as an assistant.
User: {{ .Prompt }}
Assistant:
"""

@@ -1,14 +0,0 @@

# Modelfile for creating Midjourney prompts from a topic
# Run `ollama create mj -f pathtofile` and then `ollama run mj` and enter a topic

FROM library/nous-hermes:latest
PROMPT """
{{- if not .Context }}
### System:
Embrace your role as an AI-powered creative assistant, employing Midjourney to manifest compelling AI-generated art. I will outline a specific image concept, and in response, you must produce an exhaustive, multifaceted prompt for Midjourney, ensuring every detail of the original concept is represented in your instructions. Midjourney doesn't do well with text, so after the prompt, give me instructions that I can use to create the titles in an image editor.
{{- end }}
### Instruction:
{{ .Prompt }}

### Response:
"""
examples/python/README.md (new file, 15 lines)

@@ -0,0 +1,15 @@

# Python

This is a simple example of calling the Ollama API from a Python app.

First, download a model:

```
curl -L https://huggingface.co/TheBloke/orca_mini_3B-GGML/resolve/main/orca-mini-3b.ggmlv3.q4_1.bin -o orca.bin
```

Then run it using the example script. You'll need to have Ollama running on your machine.

```
python3 main.py orca.bin
```

examples/python/main.py (new file, 32 lines)

@@ -0,0 +1,32 @@
import http.client
import json
import os
import sys

if len(sys.argv) < 2:
    print("Usage: python main.py <model file>")
    sys.exit(1)

conn = http.client.HTTPConnection('localhost', 11434)

headers = { 'Content-Type': 'application/json' }

# generate text from the model
conn.request("POST", "/api/generate", json.dumps({
    'model': os.path.join(os.getcwd(), sys.argv[1]),
    'prompt': 'write me a short story',
    'stream': True
}), headers)

response = conn.getresponse()

def parse_generate(data):
    for event in data.decode('utf-8').split("\n"):
        if not event:
            continue
        yield event

if response.status == 200:
    for chunk in response:
        for event in parse_generate(chunk):
            print(json.loads(event)['response'], end="", flush=True)
@@ -1,13 +0,0 @@

# Modelfile for creating a recipe from a list of ingredients
# Run `ollama create recipemaker -f pathtofile` and then `ollama run recipemaker` and feed it lists of ingredients to create recipes around.
FROM library/nous-hermes:latest
PROMPT """
{{- if not .Context }}
### System:
The instruction will be a list of ingredients. You should generate a recipe that can be made in less than an hour. You can also include ingredients that most people will find in their pantry every day. The recipe should be for 4 people, and you should include a description of what the meal will taste like.
{{- end }}
### Instruction:
{{ .Prompt }}

### Response:
"""

@@ -1,14 +0,0 @@

# Modelfile for creating a tweet from a topic
# Run `ollama create tweetwriter -f pathtofile` and then `ollama run tweetwriter` and enter a topic

FROM library/nous-hermes:latest
PROMPT """
{{- if not .Context }}
### System:
You are a content marketer who needs to come up with a short but succinct tweet. Make sure to include the appropriate hashtags and links. Sometimes when appropriate, describe a meme that can be included as well. All answers should be in the form of a tweet which has a max size of 280 characters. Every instruction will be the topic to create a tweet about.
{{- end }}
### Instruction:
{{ .Prompt }}

### Response:
"""
format/time.go (141 changed lines)
@@ -1,141 +0,0 @@
|
||||
package format
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// HumanDuration returns a human-readable approximation of a duration
|
||||
// (eg. "About a minute", "4 hours ago", etc.).
|
||||
// Modified version of github.com/docker/go-units.HumanDuration
|
||||
func HumanDuration(d time.Duration) string {
|
||||
return HumanDurationWithCase(d, true)
|
||||
}
|
||||
|
||||
// HumanDurationWithCase returns a human-readable approximation of a
|
||||
// duration (eg. "About a minute", "4 hours ago", etc.). but allows
|
||||
// you to specify whether the first word should be capitalized
|
||||
// (eg. "About" vs. "about")
|
||||
func HumanDurationWithCase(d time.Duration, useCaps bool) string {
|
||||
seconds := int(d.Seconds())
|
||||
|
||||
switch {
|
||||
case seconds < 1:
|
||||
if useCaps {
|
||||
return "Less than a second"
|
||||
}
|
||||
return "less than a second"
|
||||
case seconds == 1:
|
||||
return "1 second"
|
||||
case seconds < 60:
|
||||
return fmt.Sprintf("%d seconds", seconds)
|
||||
}
|
||||
|
||||
minutes := int(d.Minutes())
|
||||
switch {
|
||||
case minutes == 1:
|
||||
if useCaps {
|
||||
return "About a minute"
|
||||
}
|
||||
return "about a minute"
|
||||
case minutes < 60:
|
||||
return fmt.Sprintf("%d minutes", minutes)
|
||||
}
|
||||
|
||||
hours := int(math.Round(d.Hours()))
|
||||
switch {
|
||||
case hours == 1:
|
||||
if useCaps {
|
||||
return "About an hour"
|
||||
}
|
||||
return "about an hour"
|
||||
case hours < 48:
|
||||
return fmt.Sprintf("%d hours", hours)
|
||||
case hours < 24*7*2:
|
||||
return fmt.Sprintf("%d days", hours/24)
|
||||
case hours < 24*30*2:
|
||||
return fmt.Sprintf("%d weeks", hours/24/7)
|
||||
case hours < 24*365*2:
|
||||
return fmt.Sprintf("%d months", hours/24/30)
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%d years", int(d.Hours())/24/365)
|
||||
}
|
||||
|
||||
func HumanTime(t time.Time, zeroValue string) string {
|
||||
return humanTimeWithCase(t, zeroValue, true)
|
||||
}
|
||||
|
||||
func HumanTimeLower(t time.Time, zeroValue string) string {
|
||||
return humanTimeWithCase(t, zeroValue, false)
|
||||
}
|
||||
|
||||
func humanTimeWithCase(t time.Time, zeroValue string, useCaps bool) string {
|
||||
if t.IsZero() {
|
||||
return zeroValue
|
||||
}
|
||||
|
||||
delta := time.Since(t)
|
||||
if delta < 0 {
|
||||
return HumanDurationWithCase(-delta, useCaps) + " from now"
|
||||
}
|
||||
return HumanDurationWithCase(delta, useCaps) + " ago"
|
||||
}
|
||||
|
||||
// ExactDuration returns a human-readable hours/minutes/seconds or milliseconds format of a duration
|
||||
// the most precise level of duration is milliseconds
|
||||
func ExactDuration(d time.Duration) string {
|
||||
if d.Seconds() < 1 {
|
||||
if d.Milliseconds() == 1 {
|
||||
return fmt.Sprintf("%d millisecond", d.Milliseconds())
|
||||
}
|
||||
return fmt.Sprintf("%d milliseconds", d.Milliseconds())
|
||||
}
|
||||
|
||||
var readableDur strings.Builder
|
||||
|
||||
dur := d.String()
|
||||
|
||||
// split the default duration string format of 0h0m0s into something nicer to read
|
||||
h := strings.Split(dur, "h")
|
||||
if len(h) > 1 {
|
||||
hours := h[0]
|
||||
if hours == "1" {
|
||||
readableDur.WriteString(fmt.Sprintf("%s hour ", hours))
|
||||
} else {
|
||||
readableDur.WriteString(fmt.Sprintf("%s hours ", hours))
|
||||
}
|
||||
dur = h[1]
|
||||
}
|
||||
|
||||
m := strings.Split(dur, "m")
|
||||
if len(m) > 1 {
|
||||
mins := m[0]
|
||||
switch mins {
|
||||
case "0":
|
||||
// skip
|
||||
case "1":
|
||||
readableDur.WriteString(fmt.Sprintf("%s minute ", mins))
|
||||
default:
|
||||
readableDur.WriteString(fmt.Sprintf("%s minutes ", mins))
|
||||
}
|
||||
dur = m[1]
|
||||
}
|
||||
|
||||
s := strings.Split(dur, "s")
|
||||
if len(s) > 0 {
|
||||
sec := s[0]
|
||||
switch sec {
|
||||
case "0":
|
||||
// skip
|
||||
case "1":
|
||||
readableDur.WriteString(fmt.Sprintf("%s second ", sec))
|
||||
default:
|
||||
readableDur.WriteString(fmt.Sprintf("%s seconds ", sec))
|
||||
}
|
||||
}
|
||||
|
||||
return strings.TrimSpace(readableDur.String())
|
||||
}
|
||||
@@ -1,102 +0,0 @@
|
||||
package format
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func assertEqual(t *testing.T, a interface{}, b interface{}) {
|
||||
if a != b {
|
||||
t.Errorf("Assert failed, expected %v, got %v", b, a)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHumanDuration(t *testing.T) {
|
||||
day := 24 * time.Hour
|
||||
week := 7 * day
|
||||
month := 30 * day
|
||||
year := 365 * day
|
||||
|
||||
assertEqual(t, "Less than a second", HumanDuration(450*time.Millisecond))
|
||||
assertEqual(t, "Less than a second", HumanDurationWithCase(450*time.Millisecond, true))
|
||||
assertEqual(t, "less than a second", HumanDurationWithCase(450*time.Millisecond, false))
|
||||
assertEqual(t, "1 second", HumanDuration(1*time.Second))
|
||||
assertEqual(t, "45 seconds", HumanDuration(45*time.Second))
|
||||
assertEqual(t, "46 seconds", HumanDuration(46*time.Second))
|
||||
assertEqual(t, "59 seconds", HumanDuration(59*time.Second))
|
||||
assertEqual(t, "About a minute", HumanDuration(60*time.Second))
|
||||
assertEqual(t, "About a minute", HumanDurationWithCase(1*time.Minute, true))
|
||||
assertEqual(t, "about a minute", HumanDurationWithCase(1*time.Minute, false))
|
||||
assertEqual(t, "3 minutes", HumanDuration(3*time.Minute))
|
||||
assertEqual(t, "35 minutes", HumanDuration(35*time.Minute))
|
||||
assertEqual(t, "35 minutes", HumanDuration(35*time.Minute+40*time.Second))
|
||||
assertEqual(t, "45 minutes", HumanDuration(45*time.Minute))
|
||||
assertEqual(t, "45 minutes", HumanDuration(45*time.Minute+40*time.Second))
|
||||
assertEqual(t, "46 minutes", HumanDuration(46*time.Minute))
|
||||
assertEqual(t, "59 minutes", HumanDuration(59*time.Minute))
|
||||
assertEqual(t, "About an hour", HumanDuration(1*time.Hour))
|
||||
assertEqual(t, "About an hour", HumanDurationWithCase(1*time.Hour+29*time.Minute, true))
|
||||
assertEqual(t, "about an hour", HumanDurationWithCase(1*time.Hour+29*time.Minute, false))
|
||||
assertEqual(t, "2 hours", HumanDuration(1*time.Hour+31*time.Minute))
|
||||
assertEqual(t, "2 hours", HumanDuration(1*time.Hour+59*time.Minute))
|
||||
assertEqual(t, "3 hours", HumanDuration(3*time.Hour))
|
||||
assertEqual(t, "3 hours", HumanDuration(3*time.Hour+29*time.Minute))
|
||||
assertEqual(t, "4 hours", HumanDuration(3*time.Hour+31*time.Minute))
|
||||
assertEqual(t, "4 hours", HumanDuration(3*time.Hour+59*time.Minute))
|
||||
assertEqual(t, "4 hours", HumanDuration(3*time.Hour+60*time.Minute))
|
||||
assertEqual(t, "24 hours", HumanDuration(24*time.Hour))
|
||||
assertEqual(t, "36 hours", HumanDuration(1*day+12*time.Hour))
|
||||
assertEqual(t, "2 days", HumanDuration(2*day))
|
||||
assertEqual(t, "7 days", HumanDuration(7*day))
|
||||
assertEqual(t, "13 days", HumanDuration(13*day+5*time.Hour))
|
||||
assertEqual(t, "2 weeks", HumanDuration(2*week))
|
||||
assertEqual(t, "2 weeks", HumanDuration(2*week+4*day))
|
||||
assertEqual(t, "3 weeks", HumanDuration(3*week))
|
||||
assertEqual(t, "4 weeks", HumanDuration(4*week))
|
||||
assertEqual(t, "4 weeks", HumanDuration(4*week+3*day))
|
||||
assertEqual(t, "4 weeks", HumanDuration(1*month))
|
||||
assertEqual(t, "6 weeks", HumanDuration(1*month+2*week))
|
||||
assertEqual(t, "2 months", HumanDuration(2*month))
|
||||
assertEqual(t, "2 months", HumanDuration(2*month+2*week))
|
||||
assertEqual(t, "3 months", HumanDuration(3*month))
|
||||
assertEqual(t, "3 months", HumanDuration(3*month+1*week))
|
||||
assertEqual(t, "5 months", HumanDuration(5*month+2*week))
|
||||
assertEqual(t, "13 months", HumanDuration(13*month))
|
||||
assertEqual(t, "23 months", HumanDuration(23*month))
|
||||
assertEqual(t, "24 months", HumanDuration(24*month))
|
||||
assertEqual(t, "2 years", HumanDuration(24*month+2*week))
|
||||
assertEqual(t, "3 years", HumanDuration(3*year+2*month))
|
||||
}
|
||||
|
||||
func TestHumanTime(t *testing.T) {
|
||||
now := time.Now()
|
||||
|
||||
t.Run("zero value", func(t *testing.T) {
|
||||
assertEqual(t, HumanTime(time.Time{}, "never"), "never")
|
||||
})
|
||||
t.Run("time in the future", func(t *testing.T) {
|
||||
v := now.Add(48 * time.Hour)
|
||||
assertEqual(t, HumanTime(v, ""), "2 days from now")
|
||||
})
|
||||
t.Run("time in the past", func(t *testing.T) {
|
||||
v := now.Add(-48 * time.Hour)
|
||||
assertEqual(t, HumanTime(v, ""), "2 days ago")
|
||||
})
|
||||
}
|
||||
|
||||
func TestExactDuration(t *testing.T) {
|
||||
assertEqual(t, "1 millisecond", ExactDuration(1*time.Millisecond))
|
||||
assertEqual(t, "10 milliseconds", ExactDuration(10*time.Millisecond))
|
||||
assertEqual(t, "1 second", ExactDuration(1*time.Second))
|
||||
assertEqual(t, "10 seconds", ExactDuration(10*time.Second))
|
||||
assertEqual(t, "1 minute", ExactDuration(1*time.Minute))
|
||||
assertEqual(t, "10 minutes", ExactDuration(10*time.Minute))
|
||||
assertEqual(t, "1 hour", ExactDuration(1*time.Hour))
|
||||
assertEqual(t, "10 hours", ExactDuration(10*time.Hour))
|
||||
assertEqual(t, "1 hour 1 second", ExactDuration(1*time.Hour+1*time.Second))
|
||||
assertEqual(t, "1 hour 10 seconds", ExactDuration(1*time.Hour+10*time.Second))
|
||||
assertEqual(t, "1 hour 1 minute", ExactDuration(1*time.Hour+1*time.Minute))
|
||||
assertEqual(t, "1 hour 10 minutes", ExactDuration(1*time.Hour+10*time.Minute))
|
||||
assertEqual(t, "1 hour 1 minute 1 second", ExactDuration(1*time.Hour+1*time.Minute+1*time.Second))
|
||||
assertEqual(t, "10 hours 10 minutes 10 seconds", ExactDuration(10*time.Hour+10*time.Minute+10*time.Second))
|
||||
}
|
||||
go.mod (2 changed lines)
@@ -3,9 +3,7 @@ module github.com/jmorganca/ollama
|
||||
go 1.20
|
||||
|
||||
require (
|
||||
github.com/dustin/go-humanize v1.0.1
|
||||
github.com/gin-gonic/gin v1.9.1
|
||||
github.com/olekukonko/tablewriter v0.0.5
|
||||
github.com/spf13/cobra v1.7.0
|
||||
)
|
||||
|
||||
|
||||
go.sum (5 changed lines)
@@ -10,8 +10,6 @@ github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46t
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
|
||||
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
||||
github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU=
|
||||
github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA=
|
||||
github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
|
||||
@@ -45,7 +43,6 @@ github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNa
|
||||
github.com/mattn/go-isatty v0.0.17/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
|
||||
github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA=
|
||||
github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
|
||||
github.com/mattn/go-runewidth v0.0.14 h1:+xnbZSEeDbOIg5/mE6JF0w6n9duR1l3/WmbinWVwUuU=
|
||||
github.com/mattn/go-runewidth v0.0.14/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
|
||||
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db h1:62I3jR2EmQ4l5rM/4FEfDWcRD+abF5XlKShorW5LRoQ=
|
||||
@@ -55,8 +52,6 @@ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
|
||||
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
|
||||
github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec=
|
||||
github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY=
|
||||
github.com/pelletier/go-toml/v2 v2.0.8 h1:0ctb6s9mE31h0/lhu+J6OPmVeDxJn+kYnJc2jZR9tGQ=
|
||||
github.com/pelletier/go-toml/v2 v2.0.8/go.mod h1:vuYfssBdrU2XDZ9bYydBu6t+6a6PYNcZljzZR9VXg+4=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
|
||||
@@ -38,7 +38,7 @@ func Parse(reader io.Reader) ([]Command, error) {
|
||||
}
|
||||
|
||||
command := Command{}
|
||||
switch strings.ToUpper(fields[0]) {
|
||||
switch fields[0] {
|
||||
case "FROM":
|
||||
command.Name = "model"
|
||||
command.Arg = fields[1]
|
||||
@@ -46,8 +46,8 @@ func Parse(reader io.Reader) ([]Command, error) {
|
||||
return nil, fmt.Errorf("no model specified in FROM line")
|
||||
}
|
||||
foundModel = true
|
||||
case "PROMPT", "LICENSE":
|
||||
command.Name = strings.ToLower(fields[0])
|
||||
case "PROMPT":
|
||||
command.Name = "prompt"
|
||||
if fields[1] == `"""` {
|
||||
multiline = true
|
||||
multilineCommand = &command
|
||||
|
||||
server/images.go (156 changed lines)
@@ -3,6 +3,7 @@ package server
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/sha256"
|
||||
"encoding/hex"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
@@ -41,9 +42,10 @@ type Layer struct {
|
||||
Size int `json:"size"`
|
||||
}
|
||||
|
||||
type LayerReader struct {
|
||||
type LayerWithBuffer struct {
|
||||
Layer
|
||||
io.Reader
|
||||
|
||||
Buffer *bytes.Buffer
|
||||
}
|
||||
|
||||
type ConfigV2 struct {
|
||||
@@ -57,15 +59,6 @@ type RootFS struct {
|
||||
DiffIDs []string `json:"diff_ids"`
|
||||
}
|
||||
|
||||
func (m *ManifestV2) GetTotalSize() int {
|
||||
var total int
|
||||
for _, layer := range m.Layers {
|
||||
total += layer.Size
|
||||
}
|
||||
total += m.Config.Size
|
||||
return total
|
||||
}
|
||||
|
||||
func GetManifest(mp ModelPath) (*ManifestV2, error) {
|
||||
fp, err := mp.GetManifestPath(false)
|
||||
if err != nil {
|
||||
@@ -159,7 +152,7 @@ func CreateModel(name string, mf io.Reader, fn func(status string)) error {
|
||||
return err
|
||||
}
|
||||
|
||||
var layers []*LayerReader
|
||||
var layers []*LayerWithBuffer
|
||||
params := make(map[string]string)
|
||||
|
||||
for _, c := range commands {
|
||||
@@ -215,16 +208,6 @@ func CreateModel(name string, mf io.Reader, fn func(status string)) error {
|
||||
}
|
||||
l.MediaType = "application/vnd.ollama.image.prompt"
|
||||
layers = append(layers, l)
|
||||
case "license":
|
||||
fn("creating license layer")
|
||||
license := strings.NewReader(c.Arg)
|
||||
l, err := CreateLayer(license)
|
||||
if err != nil {
|
||||
fn(fmt.Sprintf("couldn't create license layer: %v", err))
|
||||
return fmt.Errorf("failed to create layer: %v", err)
|
||||
}
|
||||
l.MediaType = "application/vnd.ollama.image.license"
|
||||
layers = append(layers, l)
|
||||
default:
|
||||
params[c.Name] = c.Arg
|
||||
}
|
||||
@@ -282,7 +265,7 @@ func CreateModel(name string, mf io.Reader, fn func(status string)) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func removeLayerFromLayers(layers []*LayerReader, mediaType string) []*LayerReader {
|
||||
func removeLayerFromLayers(layers []*LayerWithBuffer, mediaType string) []*LayerWithBuffer {
|
||||
j := 0
|
||||
for _, l := range layers {
|
||||
if l.MediaType != mediaType {
|
||||
@@ -293,7 +276,7 @@ func removeLayerFromLayers(layers []*LayerReader, mediaType string) []*LayerRead
|
||||
return layers[:j]
|
||||
}
|
||||
|
||||
func SaveLayers(layers []*LayerReader, fn func(status string), force bool) error {
|
||||
func SaveLayers(layers []*LayerWithBuffer, fn func(status string), force bool) error {
|
||||
// Write each of the layers to disk
|
||||
for _, layer := range layers {
|
||||
fp, err := GetBlobsPath(layer.Digest)
|
||||
@@ -311,10 +294,10 @@ func SaveLayers(layers []*LayerReader, fn func(status string), force bool) error
|
||||
}
|
||||
defer out.Close()
|
||||
|
||||
if _, err = io.Copy(out, layer.Reader); err != nil {
|
||||
_, err = io.Copy(out, layer.Buffer)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
} else {
|
||||
fn(fmt.Sprintf("using already created layer %s", layer.Digest))
|
||||
}
|
||||
@@ -323,7 +306,7 @@ func SaveLayers(layers []*LayerReader, fn func(status string), force bool) error
|
||||
return nil
|
||||
}
|
||||
|
||||
func CreateManifest(name string, cfg *LayerReader, layers []*Layer) error {
|
||||
func CreateManifest(name string, cfg *LayerWithBuffer, layers []*Layer) error {
|
||||
mp := ParseModelPath(name)
|
||||
|
||||
manifest := ManifestV2{
|
||||
@@ -349,7 +332,7 @@ func CreateManifest(name string, cfg *LayerReader, layers []*Layer) error {
|
||||
return os.WriteFile(fp, manifestJSON, 0o644)
|
||||
}
|
||||
|
||||
func GetLayerWithBufferFromLayer(layer *Layer) (*LayerReader, error) {
|
||||
func GetLayerWithBufferFromLayer(layer *Layer) (*LayerWithBuffer, error) {
|
||||
fp, err := GetBlobsPath(layer.Digest)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -369,7 +352,7 @@ func GetLayerWithBufferFromLayer(layer *Layer) (*LayerReader, error) {
|
||||
return newLayer, nil
|
||||
}
|
||||
|
||||
func paramsToReader(params map[string]string) (io.ReadSeeker, error) {
|
||||
func paramsToReader(params map[string]string) (io.Reader, error) {
|
||||
opts := api.DefaultOptions()
|
||||
typeOpts := reflect.TypeOf(opts)
|
||||
|
||||
@@ -427,7 +410,7 @@ func paramsToReader(params map[string]string) (io.ReadSeeker, error) {
|
||||
return bytes.NewReader(bts), nil
|
||||
}
|
||||
|
||||
func getLayerDigests(layers []*LayerReader) ([]string, error) {
|
||||
func getLayerDigests(layers []*LayerWithBuffer) ([]string, error) {
|
||||
var digests []string
|
||||
for _, l := range layers {
|
||||
if l.Digest == "" {
|
||||
@@ -439,30 +422,34 @@ func getLayerDigests(layers []*LayerReader) ([]string, error) {
|
||||
}
|
||||
|
||||
// CreateLayer creates a Layer object from a given file
|
||||
func CreateLayer(f io.ReadSeeker) (*LayerReader, error) {
|
||||
digest, size := GetSHA256Digest(f)
|
||||
f.Seek(0, 0)
|
||||
func CreateLayer(f io.Reader) (*LayerWithBuffer, error) {
|
||||
buf := new(bytes.Buffer)
|
||||
_, err := io.Copy(buf, f)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
layer := &LayerReader{
|
||||
digest, size := GetSHA256Digest(buf)
|
||||
|
||||
layer := &LayerWithBuffer{
|
||||
Layer: Layer{
|
||||
MediaType: "application/vnd.docker.image.rootfs.diff.tar",
|
||||
Digest: digest,
|
||||
Size: size,
|
||||
},
|
||||
Reader: f,
|
||||
Buffer: buf,
|
||||
}
|
||||
|
||||
return layer, nil
|
||||
}
|
||||
|
||||
func PushModel(name, username, password string, fn func(api.ProgressResponse)) error {
|
||||
func PushModel(name, username, password string, fn func(status, digest string, Total, Completed int, Percent float64)) error {
|
||||
mp := ParseModelPath(name)
|
||||
|
||||
fn(api.ProgressResponse{Status: "retrieving manifest"})
|
||||
|
||||
fn("retrieving manifest", "", 0, 0, 0)
|
||||
manifest, err := GetManifest(mp)
|
||||
if err != nil {
|
||||
fn(api.ProgressResponse{Status: "couldn't retrieve manifest"})
|
||||
fn("couldn't retrieve manifest", "", 0, 0, 0)
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -484,21 +471,11 @@ func PushModel(name, username, password string, fn func(api.ProgressResponse)) e
|
||||
|
||||
if exists {
|
||||
completed += layer.Size
|
||||
fn(api.ProgressResponse{
|
||||
Status: "using existing layer",
|
||||
Digest: layer.Digest,
|
||||
Total: total,
|
||||
Completed: completed,
|
||||
})
|
||||
fn("using existing layer", layer.Digest, total, completed, float64(completed)/float64(total))
|
||||
continue
|
||||
}
|
||||
|
||||
fn(api.ProgressResponse{
|
||||
Status: "starting upload",
|
||||
Digest: layer.Digest,
|
||||
Total: total,
|
||||
Completed: completed,
|
||||
})
|
||||
fn("starting upload", layer.Digest, total, completed, float64(completed)/float64(total))
|
||||
|
||||
location, err := startUpload(mp, username, password)
|
||||
if err != nil {
|
||||
@@ -512,19 +489,10 @@ func PushModel(name, username, password string, fn func(api.ProgressResponse)) e
|
||||
return err
|
||||
}
|
||||
completed += layer.Size
|
||||
fn(api.ProgressResponse{
|
||||
Status: "upload complete",
|
||||
Digest: layer.Digest,
|
||||
Total: total,
|
||||
Completed: completed,
|
||||
})
|
||||
fn("upload complete", layer.Digest, total, completed, float64(completed)/float64(total))
|
||||
}
|
||||
|
||||
fn(api.ProgressResponse{
|
||||
Status: "pushing manifest",
|
||||
Total: total,
|
||||
Completed: completed,
|
||||
})
|
||||
fn("pushing manifest", "", total, completed, float64(completed/total))
|
||||
url := fmt.Sprintf("%s://%s/v2/%s/manifests/%s", mp.ProtocolScheme, mp.Registry, mp.GetNamespaceRepository(), mp.Tag)
|
||||
headers := map[string]string{
|
||||
"Content-Type": "application/vnd.docker.distribution.manifest.v2+json",
|
||||
@@ -547,19 +515,15 @@ func PushModel(name, username, password string, fn func(api.ProgressResponse)) e
|
||||
return fmt.Errorf("registry responded with code %d: %v", resp.StatusCode, string(body))
|
||||
}
|
||||
|
||||
fn(api.ProgressResponse{
|
||||
Status: "success",
|
||||
Total: total,
|
||||
Completed: completed,
|
||||
})
|
||||
fn("success", "", total, completed, 1.0)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func PullModel(name, username, password string, fn func(api.ProgressResponse)) error {
|
||||
func PullModel(name, username, password string, fn func(status, digest string, Total, Completed int, Percent float64)) error {
|
||||
mp := ParseModelPath(name)
|
||||
|
||||
fn(api.ProgressResponse{Status: "pulling manifest"})
|
||||
fn("pulling manifest", "", 0, 0, 0)
|
||||
|
||||
manifest, err := pullModelManifest(mp, username, password)
|
||||
if err != nil {
|
||||
@@ -577,15 +541,16 @@ func PullModel(name, username, password string, fn func(api.ProgressResponse)) e
|
||||
total += manifest.Config.Size
|
||||
|
||||
for _, layer := range layers {
|
||||
fn("starting download", layer.Digest, total, completed, float64(completed)/float64(total))
|
||||
if err := downloadBlob(mp, layer.Digest, username, password, fn); err != nil {
|
||||
fn(api.ProgressResponse{Status: fmt.Sprintf("error downloading: %v", err), Digest: layer.Digest})
|
||||
fn(fmt.Sprintf("error downloading: %v", err), layer.Digest, 0, 0, 0)
|
||||
return err
|
||||
}
|
||||
|
||||
completed += layer.Size
|
||||
fn("download complete", layer.Digest, total, completed, float64(completed)/float64(total))
|
||||
}
|
||||
|
||||
fn(api.ProgressResponse{Status: "writing manifest"})
|
||||
fn("writing manifest", "", total, completed, 1.0)
|
||||
|
||||
manifestJSON, err := json.Marshal(manifest)
|
||||
if err != nil {
|
||||
@@ -603,7 +568,7 @@ func PullModel(name, username, password string, fn func(api.ProgressResponse)) e
|
||||
return err
|
||||
}
|
||||
|
||||
fn(api.ProgressResponse{Status: "success"})
|
||||
fn("success", "", total, completed, 1.0)
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -635,7 +600,7 @@ func pullModelManifest(mp ModelPath, username, password string) (*ManifestV2, er
|
||||
return m, err
|
||||
}
|
||||
|
||||
func createConfigLayer(layers []string) (*LayerReader, error) {
|
||||
func createConfigLayer(layers []string) (*LayerWithBuffer, error) {
|
||||
// TODO change architecture and OS
|
||||
config := ConfigV2{
|
||||
Architecture: "arm64",
|
||||
@@ -654,26 +619,22 @@ func createConfigLayer(layers []string) (*LayerReader, error) {
|
||||
buf := bytes.NewBuffer(configJSON)
|
||||
digest, size := GetSHA256Digest(buf)
|
||||
|
||||
layer := &LayerReader{
|
||||
layer := &LayerWithBuffer{
|
||||
Layer: Layer{
|
||||
MediaType: "application/vnd.docker.container.image.v1+json",
|
||||
Digest: digest,
|
||||
Size: size,
|
||||
},
|
||||
Reader: buf,
|
||||
Buffer: buf,
|
||||
}
|
||||
return layer, nil
|
||||
}
|
||||
|
||||
// GetSHA256Digest returns the SHA256 hash of a given buffer and returns it, and the size of buffer
|
||||
func GetSHA256Digest(r io.Reader) (string, int) {
|
||||
h := sha256.New()
|
||||
n, err := io.Copy(h, r)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
return fmt.Sprintf("sha256:%x", h.Sum(nil)), int(n)
|
||||
func GetSHA256Digest(data *bytes.Buffer) (string, int) {
|
||||
layerBytes := data.Bytes()
|
||||
hash := sha256.Sum256(layerBytes)
|
||||
return "sha256:" + hex.EncodeToString(hash[:]), len(layerBytes)
|
||||
}
|
||||
|
||||
func startUpload(mp ModelPath, username string, password string) (string, error) {
|
||||
@@ -755,20 +716,16 @@ func uploadBlob(location string, layer *Layer, username string, password string)
|
||||
return nil
|
||||
}
|
||||
|
||||
func downloadBlob(mp ModelPath, digest string, username, password string, fn func(api.ProgressResponse)) error {
|
||||
func downloadBlob(mp ModelPath, digest string, username, password string, fn func(status, digest string, Total, Completed int, Percent float64)) error {
|
||||
fp, err := GetBlobsPath(digest)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if fi, _ := os.Stat(fp); fi != nil {
|
||||
_, err = os.Stat(fp)
|
||||
if !os.IsNotExist(err) {
|
||||
// we already have the file, so return
|
||||
fn(api.ProgressResponse{
|
||||
Digest: digest,
|
||||
Total: int(fi.Size()),
|
||||
Completed: int(fi.Size()),
|
||||
})
|
||||
|
||||
log.Printf("already have %s\n", digest)
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -817,21 +774,10 @@ func downloadBlob(mp ModelPath, digest string, username, password string, fn fun
|
||||
total := remaining + completed
|
||||
|
||||
for {
|
||||
fn(api.ProgressResponse{
|
||||
Status: fmt.Sprintf("downloading %s", digest),
|
||||
Digest: digest,
|
||||
Total: int(total),
|
||||
Completed: int(completed),
|
||||
})
|
||||
|
||||
fn(fmt.Sprintf("Downloading %s", digest), digest, int(total), int(completed), float64(completed)/float64(total))
|
||||
if completed >= total {
|
||||
if err := os.Rename(fp+"-partial", fp); err != nil {
|
||||
fn(api.ProgressResponse{
|
||||
Status: fmt.Sprintf("error renaming file: %v", err),
|
||||
Digest: digest,
|
||||
Total: int(total),
|
||||
Completed: int(completed),
|
||||
})
|
||||
fn(fmt.Sprintf("error renaming file: %v", err), digest, int(total), int(completed), 1)
|
||||
return err
|
||||
}
|
||||
|
||||
|
||||
@@ -91,25 +91,16 @@ func (mp ModelPath) GetManifestPath(createDir bool) (string, error) {
|
||||
return path, nil
|
||||
}
|
||||
|
||||
func GetManifestPath() (string, error) {
|
||||
home, err := os.UserHomeDir()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return filepath.Join(home, ".ollama", "models", "manifests"), nil
|
||||
}
|
||||
|
||||
func GetBlobsPath(digest string) (string, error) {
|
||||
home, err := os.UserHomeDir()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
path := filepath.Join(home, ".ollama", "models", "blobs", digest)
|
||||
path := filepath.Join(home, ".ollama", "models", "blobs")
|
||||
if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return path, nil
|
||||
return filepath.Join(path, digest), nil
|
||||
}
|
||||
|
||||
@@ -101,10 +101,15 @@ func pull(c *gin.Context) {
|
||||
ch := make(chan any)
|
||||
go func() {
|
||||
defer close(ch)
|
||||
fn := func(r api.ProgressResponse) {
|
||||
ch <- r
|
||||
fn := func(status, digest string, total, completed int, percent float64) {
|
||||
ch <- api.PullProgress{
|
||||
Status: status,
|
||||
Digest: digest,
|
||||
Total: total,
|
||||
Completed: completed,
|
||||
Percent: percent,
|
||||
}
|
||||
}
|
||||
|
||||
if err := PullModel(req.Name, req.Username, req.Password, fn); err != nil {
|
||||
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||
return
|
||||
@@ -124,10 +129,15 @@ func push(c *gin.Context) {
|
||||
ch := make(chan any)
|
||||
go func() {
|
||||
defer close(ch)
|
||||
fn := func(r api.ProgressResponse) {
|
||||
ch <- r
|
||||
fn := func(status, digest string, total, completed int, percent float64) {
|
||||
ch <- api.PushProgress{
|
||||
Status: status,
|
||||
Digest: digest,
|
||||
Total: total,
|
||||
Completed: completed,
|
||||
Percent: percent,
|
||||
}
|
||||
}
|
||||
|
||||
if err := PushModel(req.Name, req.Username, req.Password, fn); err != nil {
|
||||
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||
return
|
||||
@@ -171,52 +181,6 @@ func create(c *gin.Context) {
	streamResponse(c, ch)
}

func list(c *gin.Context) {
	var models []api.ListResponseModel
	fp, err := GetManifestPath()
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}
	err = filepath.Walk(fp, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		if !info.IsDir() {
			fi, err := os.Stat(path)
			if err != nil {
				log.Printf("skipping file: %s", fp)
				return nil
			}
			path := path[len(fp)+1:]
			slashIndex := strings.LastIndex(path, "/")
			if slashIndex == -1 {
				return nil
			}
			tag := path[:slashIndex] + ":" + path[slashIndex+1:]
			mp := ParseModelPath(tag)
			manifest, err := GetManifest(mp)
			if err != nil {
				log.Printf("skipping file: %s", fp)
				return nil
			}
			model := api.ListResponseModel{
				Name:       mp.GetShortTagname(),
				Size:       manifest.GetTotalSize(),
				ModifiedAt: fi.ModTime(),
			}
			models = append(models, model)
		}
		return nil
	})
	if err != nil {
		c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		return
	}

	c.JSON(http.StatusOK, api.ListResponse{models})
}

func Serve(ln net.Listener) error {
	r := gin.Default()

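The `list` handler above recovers a model tag from where its manifest file sits on disk: the path relative to the manifests directory ends in the tag name, so the last `/` is turned back into `:`. A small standalone sketch of that conversion (the example path is hypothetical):

```
package main

import (
	"fmt"
	"strings"
)

// tagFromManifestPath converts a manifest path such as "library/llama2/latest"
// into the tag "library/llama2:latest"; paths with no separator are skipped.
func tagFromManifestPath(rel string) (string, bool) {
	i := strings.LastIndex(rel, "/")
	if i == -1 {
		return "", false
	}
	return rel[:i] + ":" + rel[i+1:], true
}

func main() {
	if tag, ok := tagFromManifestPath("library/llama2/latest"); ok {
		fmt.Println(tag) // library/llama2:latest
	}
}
```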
@@ -228,7 +192,6 @@ func Serve(ln net.Listener) error {
	r.POST("/api/generate", generate)
	r.POST("/api/create", create)
	r.POST("/api/push", push)
	r.GET("/api/tags", list)

	log.Printf("Listening on %s", ln.Addr())
	s := &http.Server{

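`Serve` builds a gin router and hands it to a `net/http` server that accepts connections on a caller-supplied listener. A minimal sketch of that wiring follows; the route handler and listen address are placeholders, not the routes registered above.

```
package main

import (
	"log"
	"net"
	"net/http"

	"github.com/gin-gonic/gin"
)

func main() {
	r := gin.Default()
	r.GET("/api/tags", func(c *gin.Context) {
		c.JSON(http.StatusOK, gin.H{"models": []string{}})
	})

	ln, err := net.Listen("tcp", "127.0.0.1:11434") // example address only
	if err != nil {
		log.Fatal(err)
	}

	log.Printf("Listening on %s", ln.Addr())
	s := &http.Server{Handler: r}
	log.Fatal(s.Serve(ln))
}
```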
@@ -1,4 +1,3 @@
import Header from '../header'
import Downloader from './downloader'
import Signup from './signup'

@@ -27,19 +26,22 @@ export default async function Download() {
  }

  return (
    <>
      <Header />
      <main className='flex min-h-screen max-w-6xl flex-col py-20 px-16 lg:p-32 items-center mx-auto'>
        <img src='/ollama.png' className='w-16 h-auto' />
        <section className='mt-12 mb-8 text-center'>
          <h2 className='my-2 max-w-md text-3xl tracking-tight'>Downloading...</h2>
          <h3 className='text-base text-neutral-500 mt-12 max-w-[16rem]'>
            While Ollama downloads, sign up to get notified of new updates.
          </h3>
          <Downloader url={asset.browser_download_url} />
        </section>
    <main className='flex min-h-screen max-w-2xl flex-col p-4 lg:p-24 items-center mx-auto'>
      <img src='/ollama.png' className='w-16 h-auto' />
      <section className='my-12 text-center'>
        <h2 className='my-2 max-w-md text-3xl tracking-tight'>Downloading Ollama</h2>
        <h3 className='text-sm text-neutral-500'>
          Problems downloading?{' '}
          <a href={asset.browser_download_url} className='underline'>
            Try again
          </a>
        </h3>
        <Downloader url={asset.browser_download_url} />
      </section>
      <section className='max-w-sm flex flex-col w-full items-center border border-neutral-200 rounded-xl px-8 pt-8 pb-2'>
        <p className='text-lg leading-tight text-center mb-6 max-w-[260px]'>Sign up for updates</p>
        <Signup />
      </main>
    </>
      </section>
    </main>
  )
}

@@ -28,7 +28,7 @@ export default function Signup() {

        return false
      }}
      className='flex self-stretch flex-col gap-3 h-32 md:mx-40 lg:mx-72'
      className='flex self-stretch flex-col gap-3 h-32'
    >
      <input
        required
@@ -37,13 +37,13 @@ export default function Signup() {
        onChange={e => setEmail(e.target.value)}
        type='email'
        placeholder='your@email.com'
        className='border border-neutral-200 rounded-lg px-4 py-2 focus:outline-none placeholder-neutral-300'
        className='bg-neutral-100 rounded-lg px-4 py-2 focus:outline-none placeholder-neutral-500'
      />
      <input
        type='submit'
        value='Get updates'
        disabled={submitting}
        className='bg-black text-white disabled:text-neutral-200 disabled:bg-neutral-700 rounded-full px-4 py-2 focus:outline-none cursor-pointer'
        className='bg-black text-white disabled:text-neutral-200 disabled:bg-neutral-700 rounded-lg px-4 py-2 focus:outline-none cursor-pointer'
      />
      {success && <p className='text-center text-sm'>You're signed up for updates</p>}
    </form>

@@ -1,24 +0,0 @@
const navigation = [
  { name: 'Discord', href: 'https://discord.gg/MrfB5FbNWN' },
  { name: 'GitHub', href: 'https://github.com/jmorganca/ollama' },
  { name: 'Download', href: '/download' },
]

export default function Header() {
  return (
    <header className='absolute inset-x-0 top-0 z-50'>
      <nav className='mx-auto flex items-center justify-between px-10 py-4'>
        <a className='flex-1 font-bold' href='/'>
          Ollama
        </a>
        <div className='flex space-x-8'>
          {navigation.map(item => (
            <a key={item.name} href={item.href} className='text-sm leading-6 text-gray-900'>
              {item.name}
            </a>
          ))}
        </div>
      </nav>
    </header>
  )
}

@@ -1,32 +1,34 @@
import { AiFillApple } from 'react-icons/ai'

import models from '../../models.json'
import Header from './header'

export default async function Home() {
  return (
    <>
      <Header />
      <main className='flex min-h-screen max-w-6xl flex-col py-20 px-16 md:p-32 items-center mx-auto'>
        <img src='/ollama.png' className='w-16 h-auto' />
        <section className='my-12 text-center'>
          <div className='flex flex-col space-y-2'>
            <h2 className='md:max-w-[18rem] mx-auto my-2 text-3xl tracking-tight'>Portable large language models</h2>
            <h3 className='md:max-w-xs mx-auto text-base text-neutral-500'>
              Bundle a model’s weights, configuration, prompts, data and more into self-contained packages that run anywhere.
            </h3>
    <main className='flex min-h-screen max-w-2xl flex-col p-4 lg:p-24'>
      <img src='/ollama.png' className='w-16 h-auto' />
      <section className='my-4'>
        <p className='my-3 max-w-md'>
          <a className='underline' href='https://github.com/jmorganca/ollama'>
            Ollama
          </a>{' '}
          is a tool for running large language models, currently for macOS with Windows and Linux coming soon.
          <br />
          <br />
          <a href='/download'>
            <button className='bg-black text-white text-sm py-2 px-3 rounded-lg flex items-center gap-2'>
              <AiFillApple className='h-auto w-5 relative -top-px' /> Download for macOS
            </button>
          </a>
        </p>
      </section>
      <section className='my-4'>
        <h2 className='mb-4 text-lg'>Example models you can try running:</h2>
        {models.map(m => (
          <div className='my-2 grid font-mono' key={m.name}>
            <code className='py-0.5'>ollama run {m.name}</code>
          </div>
          <div className='mx-auto flex flex-col space-y-4 mt-12'>
            <a href='/download' className='md:mx-10 lg:mx-14 bg-black text-white rounded-full px-4 py-2 focus:outline-none cursor-pointer'>
              Download
            </a>
            <p className='text-neutral-500 text-sm'>
              Available for macOS with Apple Silicon <br />
              Windows & Linux support coming soon.
            </p>
          </div>
        </section>
      </main>
    </>
        ))}
      </section>
    </main>
  )
}