Compare commits: insecure-r...delete-fix (9 commits)
Mirror of https://github.com/ollama/ollama.git

| SHA1 |
|---|
| 1758d36563 |
| 9f6e97865c |
| f5f0da06d9 |
| 52f04e39f2 |
| 3c8f4c03d7 |
| 7ba1308595 |
| 91cd54016c |
| 09dc6273e3 |
| ebaa33ac28 |

README.md (10 changed lines)

````diff
@@ -125,3 +125,13 @@ Finally, run a model!
 ```
 ./ollama run llama2
 ```
+
+## REST API
+
+### `POST /api/generate`
+
+Generate text from a model.
+
+```
+curl -X POST http://localhost:11434/api/generate -d '{"model": "llama2", "prompt":"Why is the sky blue?"}'
+```
````
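
To make the new README section concrete, here is a minimal Go sketch of calling the `POST /api/generate` endpoint added above. It assumes a local server on the default port 11434 and, as the client's `stream` helper elsewhere in this change suggests, that the response arrives as a stream of newline-delimited JSON objects; it simply prints each line rather than asserting any particular response schema.

```go
package main

import (
	"bufio"
	"bytes"
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

func main() {
	// Same request body as the curl example in the README addition.
	payload, err := json.Marshal(map[string]string{
		"model":  "llama2",
		"prompt": "Why is the sky blue?",
	})
	if err != nil {
		log.Fatal(err)
	}

	resp, err := http.Post("http://localhost:11434/api/generate", "application/json", bytes.NewReader(payload))
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	// Print each streamed line as it arrives.
	scanner := bufio.NewScanner(resp.Body)
	for scanner.Scan() {
		fmt.Println(scanner.Text())
	}
	if err := scanner.Err(); err != nil {
		log.Fatal(err)
	}
}
```

Run against a live server, this should print the same streamed output the `curl` example produces.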
```diff
@@ -27,7 +27,7 @@ func checkError(resp *http.Response, body []byte) error {
 	err := json.Unmarshal(body, &apiError)
 	if err != nil {
 		// Use the full body as the message if we fail to decode a response.
-		apiError.Message = string(body)
+		apiError.ErrorMessage = string(body)
 	}
 
 	return apiError
@@ -92,7 +92,6 @@ func (c *Client) do(ctx context.Context, method, path string, reqData, respData
 		}
 	}
 	return nil
-
 }
 
 func (c *Client) stream(ctx context.Context, method, path string, data any, fn func([]byte) error) error {
@@ -137,9 +136,9 @@ func (c *Client) stream(ctx context.Context, method, path string, data any, fn func([]byte) error) error {
 
 	if response.StatusCode >= 400 {
 		return StatusError{
-			StatusCode: response.StatusCode,
-			Status:     response.Status,
-			Message:    errorResponse.Error,
+			StatusCode:   response.StatusCode,
+			Status:       response.Status,
+			ErrorMessage: errorResponse.Error,
 		}
 	}
 
```
api/types.go (19 changed lines)

```diff
@@ -8,16 +8,23 @@ import (
 )
 
 type StatusError struct {
-	StatusCode int
-	Status     string
-	Message    string
+	StatusCode   int
+	Status       string
+	ErrorMessage string `json:"error"`
 }
 
 func (e StatusError) Error() string {
-	if e.Message != "" {
-		return fmt.Sprintf("%s: %s", e.Status, e.Message)
+	switch {
+	case e.Status != "" && e.ErrorMessage != "":
+		return fmt.Sprintf("%s: %s", e.Status, e.ErrorMessage)
+	case e.Status != "":
+		return e.Status
+	case e.ErrorMessage != "":
+		return e.ErrorMessage
+	default:
+		// this should not happen
+		return "something went wrong, please see the ollama server logs for details"
 	}
-	return e.Status
 }
 
 type GenerateRequest struct {
```
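
A standalone sketch of why the `json:"error"` tag on the renamed field matters: the server's error body can be unmarshalled straight into `StatusError`, and `Error()` then falls back through the cases introduced above. The type and method are copied from the hunk rather than imported, so this runs with only the standard library; the sample response body is illustrative.

```go
package main

import (
	"encoding/json"
	"fmt"
)

type StatusError struct {
	StatusCode   int
	Status       string
	ErrorMessage string `json:"error"`
}

func (e StatusError) Error() string {
	switch {
	case e.Status != "" && e.ErrorMessage != "":
		return fmt.Sprintf("%s: %s", e.Status, e.ErrorMessage)
	case e.Status != "":
		return e.Status
	case e.ErrorMessage != "":
		return e.ErrorMessage
	default:
		return "something went wrong, please see the ollama server logs for details"
	}
}

func main() {
	// An error body like the server might send on failure (illustrative).
	body := []byte(`{"error": "model 'llama2' not found"}`)

	apiError := StatusError{StatusCode: 404, Status: "404 Not Found"}
	if err := json.Unmarshal(body, &apiError); err != nil {
		// Mirror of checkError's fallback: keep the raw body as the message.
		apiError.ErrorMessage = string(body)
	}

	fmt.Println(apiError.Error()) // 404 Not Found: model 'llama2' not found
}
```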
````diff
@@ -6,6 +6,12 @@ Install required tools:
 brew install go
 ```
 
+Enable CGO:
+
+```
+export CGO_ENABLED=1
+```
+
 Then build ollama:
 
 ```
````
```diff
@@ -548,9 +548,13 @@ func DeleteModel(name string, fn func(api.ProgressResponse)) error {
 	// only delete the files which are still in the deleteMap
 	for k, v := range deleteMap {
 		if v {
-			err := os.Remove(k)
+			fp, err := GetBlobsPath(k)
 			if err != nil {
-				log.Printf("couldn't remove file '%s': %v", k, err)
+				log.Printf("couldn't get file path for '%s': %v", k, err)
 				continue
 			}
+			if err := os.Remove(fp); err != nil {
+				log.Printf("couldn't remove file '%s': %v", fp, err)
+				continue
+			}
 		}
```
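
The hunk above changes `DeleteModel` to resolve each key through `GetBlobsPath` before removing anything, instead of calling `os.Remove` on the raw map key. A rough, hypothetical illustration of that resolve-then-remove pattern follows; `resolveBlobPath` is a stand-in for the repository's real `GetBlobsPath`, whose implementation is not shown here.

```go
package main

import (
	"log"
	"os"
	"path/filepath"
	"strings"
)

// resolveBlobPath maps a digest-style key such as "sha256:abc..." to a file
// under dir. This is an assumed helper; the real GetBlobsPath may differ.
func resolveBlobPath(dir, key string) (string, error) {
	return filepath.Join(dir, strings.ReplaceAll(key, ":", "-")), nil
}

// deleteUnusedBlobs mirrors the loop in the diff: skip entries that are still
// referenced, resolve the rest to paths, and log-and-continue on any failure.
func deleteUnusedBlobs(dir string, deleteMap map[string]bool) {
	for k, v := range deleteMap {
		if !v {
			continue
		}
		fp, err := resolveBlobPath(dir, k)
		if err != nil {
			log.Printf("couldn't get file path for '%s': %v", k, err)
			continue
		}
		if err := os.Remove(fp); err != nil {
			log.Printf("couldn't remove file '%s': %v", fp, err)
			continue
		}
	}
}

func main() {
	// No-op demonstration call with an empty map.
	deleteUnusedBlobs(os.TempDir(), map[string]bool{})
}
```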
```diff
@@ -2,6 +2,7 @@ package server
 
 import (
 	"encoding/json"
+	"errors"
 	"io"
 	"log"
 	"net"
@@ -190,6 +191,10 @@ func ListModelsHandler(c *gin.Context) {
 	}
 	err = filepath.Walk(fp, func(path string, info os.FileInfo, err error) error {
 		if err != nil {
+			if errors.Is(err, os.ErrNotExist) {
+				log.Printf("manifest file does not exist: %s", fp)
+				return nil
+			}
 			return err
 		}
 		if !info.IsDir() {
```
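
The `errors.Is(err, os.ErrNotExist)` guard above relies on standard-library behaviour: when the root passed to `filepath.Walk` does not exist, the walk callback is invoked once with a non-nil error, and returning `nil` from it lets the walk finish cleanly. A small self-contained sketch of that behaviour (the directory name is made up):

```go
package main

import (
	"errors"
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	// A path that does not exist, standing in for a missing manifests dir.
	missing := filepath.Join(os.TempDir(), "no-such-manifest-dir")

	err := filepath.Walk(missing, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			if errors.Is(err, os.ErrNotExist) {
				fmt.Printf("manifest file does not exist: %s\n", path)
				return nil // treat a missing directory as an empty listing
			}
			return err
		}
		fmt.Println("visited:", path)
		return nil
	})
	if err != nil {
		fmt.Println("walk failed:", err)
	}
}
```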
```diff
@@ -11,18 +11,23 @@ export default async function Home() {
       <Image src='/ollama.png' width={64} height={64} alt='ollamaIcon' />
       <section className='my-12 text-center'>
         <div className='flex flex-col space-y-2'>
-          <h2 className='md:max-w-[18rem] mx-auto my-2 text-3xl tracking-tight'>Portable large language models</h2>
+          <h2 className='md:max-w-md mx-auto my-2 text-3xl tracking-tight'>
+            Get up and running with large language models, locally.
+          </h2>
           <h3 className='md:max-w-xs mx-auto text-base text-neutral-500'>
-            Bundle a model’s weights, configuration, prompts, data and more into self-contained packages that run anywhere.
+            Run Llama 2 and other models on macOS. Customize and create your own.
           </h3>
         </div>
-        <div className='mx-auto flex flex-col space-y-4 mt-12'>
-          <Link href='/download' className='md:mx-10 lg:mx-14 bg-black text-white rounded-full px-4 py-2 focus:outline-none cursor-pointer'>
+        <div className='mx-auto max-w-xs flex flex-col space-y-4 mt-12'>
+          <Link
+            href='/download'
+            className='md:mx-10 lg:mx-14 bg-black text-white rounded-full px-4 py-2 focus:outline-none cursor-pointer'
+          >
             Download
           </Link>
           <p className='text-neutral-500 text-sm '>
-            Available for macOS with Apple Silicon <br />
-            Windows & Linux support coming soon.
+            Available for macOS with Apple Silicon <br />
+            Windows & Linux support coming soon.
           </p>
         </div>
       </section>
```