Compare commits

..

1 Commit

Author SHA1 Message Date
Patrick Devine
206fab0e15 add license layers to the parser 2023-07-18 22:44:35 -07:00
6 changed files with 31 additions and 38 deletions

View File

@@ -52,17 +52,14 @@ Hello! It's your friend Mario.
## Model library
Ollama includes a library of open-source, pre-trained models. More models are coming soon. You should have at least 8 GB of RAM to run the 3B models, 16 GB
to run the 7B models, and 32 GB to run the 13B models.
Ollama includes a library of open-source, pre-trained models. More models are coming soon.
| Model | Parameters | Size | Download |
| ---------------------- | ---------- | ----- | --------------------------- |
| Llama2 | 7B | 3.8GB | `ollama pull llama2` |
| Llama2 13B | 13B | 7.3GB | `ollama pull llama2:13b` |
| Orca Mini | 3B | 1.9GB | `ollama pull orca` |
| Vicuna | 7B | 3.8GB | `ollama pull vicuna` |
| Nous-Hermes | 13B | 7.3GB | `ollama pull nous-hermes` |
| Wizard Vicuna Uncensored | 13B | 7.3GB | `ollama pull wizard-vicuna` |
| Model | Parameters | Size | Download |
| ----------- | ---------- | ----- | ------------------------- |
| Llama2 | 7B | 3.8GB | `ollama pull llama2` |
| Orca Mini | 3B | 1.9GB | `ollama pull orca` |
| Vicuna | 7B | 3.8GB | `ollama pull vicuna` |
| Nous-Hermes | 13B | 7.3GB | `ollama pull nous-hermes` |
## Building
@@ -73,7 +70,7 @@ go build .
To run it start the server:
```
./ollama serve &
./ollama server &
```
Finally, run a model!

View File

@@ -13,9 +13,7 @@ export function installed() {
}
export async function install() {
const command = `do shell script "mkdir -p ${path.dirname(
symlinkPath
)} && ln -F -s ${ollama} ${symlinkPath}" with administrator privileges`
const command = `do shell script "ln -F -s ${ollama} ${symlinkPath}" with administrator privileges`
try {
await exec(`osascript -e '${command}'`)

View File

@@ -651,7 +651,8 @@ func createConfigLayer(layers []string) (*LayerReader, error) {
return nil, err
}
digest, size := GetSHA256Digest(bytes.NewBuffer(configJSON))
buf := bytes.NewBuffer(configJSON)
digest, size := GetSHA256Digest(buf)
layer := &LayerReader{
Layer: Layer{
@@ -659,7 +660,7 @@ func createConfigLayer(layers []string) (*LayerReader, error) {
Digest: digest,
Size: size,
},
Reader: bytes.NewBuffer(configJSON),
Reader: buf,
}
return layer, nil
}

View File

@@ -1,5 +1,3 @@
import Image from 'next/image'
import Header from '../header'
import Downloader from './downloader'
import Signup from './signup'
@@ -32,7 +30,7 @@ export default async function Download() {
<>
<Header />
<main className='flex min-h-screen max-w-6xl flex-col py-20 px-16 lg:p-32 items-center mx-auto'>
<Image src='/ollama.png' width={64} height={64} alt='ollamaIcon' />
<img src='/ollama.png' className='w-16 h-auto' />
<section className='mt-12 mb-8 text-center'>
<h2 className='my-2 max-w-md text-3xl tracking-tight'>Downloading...</h2>
<h3 className='text-base text-neutral-500 mt-12 max-w-[16rem]'>

View File

@@ -1,25 +1,24 @@
import Link from "next/link"
const navigation = [
{ name: 'Github', href: 'https://github.com/jmorganca/ollama' },
{ name: 'Discord', href: 'https://discord.gg/MrfB5FbNWN' },
{ name: 'GitHub', href: 'https://github.com/jmorganca/ollama' },
{ name: 'Download', href: '/download' },
]
export default function Header() {
export default function Header() {
return (
<header className="absolute inset-x-0 top-0 z-50">
<nav className="mx-auto flex items-center justify-between px-10 py-4">
<Link className="flex-1 font-bold" href="/">
<header className='absolute inset-x-0 top-0 z-50'>
<nav className='mx-auto flex items-center justify-between px-10 py-4'>
<a className='flex-1 font-bold' href='/'>
Ollama
</Link>
<div className="flex space-x-8">
{navigation.map((item) => (
<Link key={item.name} href={item.href} className="text-sm leading-6 text-gray-900">
</a>
<div className='flex space-x-8'>
{navigation.map(item => (
<a key={item.name} href={item.href} className='text-sm leading-6 text-gray-900'>
{item.name}
</Link>
</a>
))}
</div>
</nav>
</header >
</header>
)
}
}

View File

@@ -1,6 +1,6 @@
import Image from 'next/image'
import Link from 'next/link'
import { AiFillApple } from 'react-icons/ai'
import models from '../../models.json'
import Header from './header'
export default async function Home() {
@@ -8,7 +8,7 @@ export default async function Home() {
<>
<Header />
<main className='flex min-h-screen max-w-6xl flex-col py-20 px-16 md:p-32 items-center mx-auto'>
<Image src='/ollama.png' width={64} height={64} alt='ollamaIcon' />
<img src='/ollama.png' className='w-16 h-auto' />
<section className='my-12 text-center'>
<div className='flex flex-col space-y-2'>
<h2 className='md:max-w-[18rem] mx-auto my-2 text-3xl tracking-tight'>Portable large language models</h2>
@@ -17,9 +17,9 @@ export default async function Home() {
</h3>
</div>
<div className='mx-auto flex flex-col space-y-4 mt-12'>
<Link href='/download' className='md:mx-10 lg:mx-14 bg-black text-white rounded-full px-4 py-2 focus:outline-none cursor-pointer'>
<a href='/download' className='md:mx-10 lg:mx-14 bg-black text-white rounded-full px-4 py-2 focus:outline-none cursor-pointer'>
Download
</Link>
</a>
<p className='text-neutral-500 text-sm '>
Available for macOS with Apple Silicon <br />
Windows & Linux support coming soon.