Compare commits


24 Commits

SHA1 · Author · Message · Date

660dee7005 · Jeffrey Morgan · hourly heartbeat · 2023-07-08 13:18:34 -04:00
a678a7235a · Jeffrey Morgan · shorter README.md · 2023-07-07 23:54:50 -04:00
74e92d1258 · Jeffrey Morgan · add basic / route for server · 2023-07-07 23:46:15 -04:00
ea809df196 · Jeffrey Morgan · update Dockerfile to use OLLAMA_HOST · 2023-07-07 23:43:50 -04:00
0bee4a8c07 · Bruce MacDonald · if directory cannot be resolved, do not fail · 2023-07-07 23:18:25 -04:00
f533f85d44 · Bruce MacDonald · pr feedback (move error check to api client pull; simplify error check in generate; return nil on any pull error) · 2023-07-07 17:12:02 -04:00
553c884474 · Jeffrey Morgan · allow specifying server host and port with OLLAMA_HOST and OLLAMA_PORT · 2023-07-07 16:48:13 -04:00
b245f5af8f · Jeffrey Morgan · web: open download page in new tab · 2023-07-07 16:29:58 -04:00
39e946f256 · Jeffrey Morgan · fix download url · 2023-07-07 16:07:19 -04:00
049295d9ba · Bruce MacDonald · Update README.md · 2023-07-07 15:33:50 -04:00
61dd87bd90 · Bruce MacDonald · if directory cannot be resolved, do not fail · 2023-07-07 15:27:43 -04:00
12199bcfa8 · Jeffrey Morgan · update download links to the releases page until we have a better download url · 2023-07-07 15:21:40 -04:00
b24be8c6b3 · Bruce MacDonald · update directory url · 2023-07-07 15:13:41 -04:00
ba2bade0d5 · Michael Yang · Merge pull request #55 from jmorganca/fix-run-generate (fix run generate) · 2023-07-07 11:37:56 -07:00
303982b56e · Michael Yang · fix run generate · 2023-07-07 11:36:29 -07:00
0fea50cd42 · Michael Yang · Merge pull request #54 from jmorganca/empty-line (no prompt on empty line) · 2023-07-07 11:29:39 -07:00
c316893545 · Bruce MacDonald · fix resume download · 2023-07-07 14:26:58 -04:00
053739d19f · Michael Yang · no prompt on empty line · 2023-07-07 11:01:44 -07:00
3d73ad0c56 · Patrick Devine · Merge pull request #52 from jmorganca/go-opts (pass model and predict options) · 2023-07-07 10:59:11 -07:00
bc54daf2bc · Eva Ho · add app to open at login by default · 2023-07-07 13:49:42 -04:00
8b57e715a7 · Jeffrey Morgan · add version · 2023-07-07 13:44:36 -04:00
1358e27b77 · Jeffrey Morgan · add publish script · 2023-07-07 12:59:45 -04:00
7406881eeb · Jeffrey Morgan · write version at build time · 2023-07-07 12:59:45 -04:00
962d351281 · Bruce MacDonald · remove replit example which does not run currently · 2023-07-07 12:39:42 -04:00
23 changed files with 316 additions and 157 deletions

Dockerfile

@@ -13,4 +13,5 @@ ARG GROUP=ollama
RUN addgroup -g 1000 $GROUP && adduser -u 1000 -DG $GROUP $USER
USER $USER:$GROUP
ENTRYPOINT ["/bin/ollama"]
ENV OLLAMA_HOST 0.0.0.0
CMD ["serve"]

Makefile

@@ -11,7 +11,8 @@ ollama: llama
.PHONY: app
app: ollama
npm run --prefix app make
npm install --prefix app
npm run --prefix app make:sign
clean:
go clean

README.md

@@ -4,7 +4,7 @@
Run large language models with `llama.cpp`.
> Note: certain models that can be run with this project are intended for research and/or non-commercial use only.
> Note: certain models that can be run with Ollama are intended for research and/or non-commercial use only.
### Features
@@ -16,7 +16,7 @@ Run large language models with `llama.cpp`.
## Install
- Download for macOS
- [Download](https://ollama.ai/download) for macOS
- Download for Windows (coming soon)
- Docker: `docker run -p 11434:11434 ollama/ollama`
@@ -24,10 +24,10 @@ You can also build the [binary from source](#building).
## Quickstart
Run the model that started it all.
Run a fast and simple model.
```
ollama run llama
ollama run orca
```
## Example models
@@ -48,14 +48,6 @@ Ask questions. Get answers.
ollama run orca "Write an email to my boss."
```
### 👩‍💻 Code completion
Sometimes you just need a little help writing code.
```
ollama run replit "Give me react code to render a button"
```
### 📖 Storytelling
Venture into the unknown.

api/client.go

@@ -106,6 +106,11 @@ func (c *Client) Pull(ctx context.Context, req *PullRequest, fn PullProgressFunc
return err
}
if resp.Error.Message != "" {
// couldn't pull the model from the directory, proceed anyway
return nil
}
return fn(resp)
}),
)
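Note: returning nil here makes the pull best-effort on the client side. If the model directory cannot be reached, the server reports the error (see the 502 handling in server/routes.go below) but the client proceeds with whatever is cached locally, matching the "if directory cannot be resolved, do not fail" commits above.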

api/types.go

@@ -26,61 +26,62 @@ type PullProgress struct {
Total int64 `json:"total"`
Completed int64 `json:"completed"`
Percent float64 `json:"percent"`
Error Error `json:"error"`
}
type GenerateRequest struct {
Model string `json:"model"`
Prompt string `json:"prompt"`
ModelOptions `json:"model_opts"`
PredictOptions `json:"predict_opts"`
ModelOptions `json:"model_opts,omitempty"`
PredictOptions `json:"predict_opts,omitempty"`
}
type ModelOptions struct {
ContextSize int `json:"context_size"`
Seed int `json:"seed"`
NBatch int `json:"n_batch"`
F16Memory bool `json:"memory_f16"`
MLock bool `json:"mlock"`
MMap bool `json:"mmap"`
VocabOnly bool `json:"vocab_only"`
LowVRAM bool `json:"low_vram"`
Embeddings bool `json:"embeddings"`
NUMA bool `json:"numa"`
NGPULayers int `json:"gpu_layers"`
MainGPU string `json:"main_gpu"`
TensorSplit string `json:"tensor_split"`
ContextSize int `json:"context_size,omitempty"`
Seed int `json:"seed,omitempty"`
NBatch int `json:"n_batch,omitempty"`
F16Memory bool `json:"memory_f16,omitempty"`
MLock bool `json:"mlock,omitempty"`
MMap bool `json:"mmap,omitempty"`
VocabOnly bool `json:"vocab_only,omitempty"`
LowVRAM bool `json:"low_vram,omitempty"`
Embeddings bool `json:"embeddings,omitempty"`
NUMA bool `json:"numa,omitempty"`
NGPULayers int `json:"gpu_layers,omitempty"`
MainGPU string `json:"main_gpu,omitempty"`
TensorSplit string `json:"tensor_split,omitempty"`
}
type PredictOptions struct {
Seed int `json:"seed"`
Threads int `json:"threads"`
Tokens int `json:"tokens"`
TopK int `json:"top_k"`
Repeat int `json:"repeat"`
Batch int `json:"batch"`
NKeep int `json:"nkeep"`
TopP float64 `json:"top_p"`
Temperature float64 `json:"temp"`
Penalty float64 `json:"penalty"`
Seed int `json:"seed,omitempty"`
Threads int `json:"threads,omitempty"`
Tokens int `json:"tokens,omitempty"`
TopK int `json:"top_k,omitempty"`
Repeat int `json:"repeat,omitempty"`
Batch int `json:"batch,omitempty"`
NKeep int `json:"nkeep,omitempty"`
TopP float64 `json:"top_p,omitempty"`
Temperature float64 `json:"temp,omitempty"`
Penalty float64 `json:"penalty,omitempty"`
F16KV bool
DebugMode bool
StopPrompts []string
IgnoreEOS bool `json:"ignore_eos"`
IgnoreEOS bool `json:"ignore_eos,omitempty"`
TailFreeSamplingZ float64 `json:"tfs_z"`
TypicalP float64 `json:"typical_p"`
FrequencyPenalty float64 `json:"freq_penalty"`
PresencePenalty float64 `json:"pres_penalty"`
Mirostat int `json:"mirostat"`
MirostatETA float64 `json:"mirostat_lr"`
MirostatTAU float64 `json:"mirostat_ent"`
PenalizeNL bool `json:"penalize_nl"`
LogitBias string `json:"logit_bias"`
TailFreeSamplingZ float64 `json:"tfs_z,omitempty"`
TypicalP float64 `json:"typical_p,omitempty"`
FrequencyPenalty float64 `json:"freq_penalty,omitempty"`
PresencePenalty float64 `json:"pres_penalty,omitempty"`
Mirostat int `json:"mirostat,omitempty"`
MirostatETA float64 `json:"mirostat_lr,omitempty"`
MirostatTAU float64 `json:"mirostat_ent,omitempty"`
PenalizeNL bool `json:"penalize_nl,omitempty"`
LogitBias string `json:"logit_bias,omitempty"`
PathPromptCache string
MLock bool `json:"mlock"`
MMap bool `json:"mmap"`
MLock bool `json:"mlock,omitempty"`
MMap bool `json:"mmap,omitempty"`
PromptCacheAll bool
PromptCacheRO bool
MainGPU string
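Note: with `omitempty`, a client that leaves these options at their zero values no longer serializes every field. One caveat: encoding/json drops zero-valued scalar fields but never omits a struct value itself, so the embedded `model_opts` and `predict_opts` objects still appear (as `{}`) in a minimal request.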

app/forge.config.ts

@@ -1,4 +1,4 @@
import type { ForgeConfig } from '@electron-forge/shared-types'
import type { ForgeConfig, ResolvedForgeConfig, ForgeMakeResult } from '@electron-forge/shared-types'
import { MakerSquirrel } from '@electron-forge/maker-squirrel'
import { MakerZIP } from '@electron-forge/maker-zip'
import { PublisherGithub } from '@electron-forge/publisher-github'
@@ -49,6 +49,11 @@ const config: ForgeConfig = {
prerelease: true,
}),
],
hooks: {
readPackageJson: async (_, packageJson) => {
return { ...packageJson, version: process.env.VERSION || packageJson.version }
},
},
plugins: [
new AutoUnpackNativesPlugin({}),
new WebpackPlugin({
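Note: the new `readPackageJson` hook stamps the app's version from the `VERSION` environment variable at packaging time, falling back to the version already in package.json. Together with `scripts/publish.sh` below, which runs with `VERSION` set, this implements "write version at build time" (7406881eeb).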

app/src/index.ts

@@ -1,5 +1,6 @@
import { spawn, exec } from 'child_process'
import { app, autoUpdater, dialog, Tray, Menu } from 'electron'
import Store from 'electron-store'
import * as path from 'path'
import * as fs from 'fs'
@@ -7,6 +8,7 @@ import { analytics, id } from './telemetry'
require('@electron/remote/main').initialize()
const store = new Store()
let tray: Tray | null = null
const SingleInstanceLock = app.requestSingleInstanceLock()
@@ -15,7 +17,6 @@ if (!SingleInstanceLock) {
}
const createSystemtray = () => {
let iconPath = path.join(__dirname, '..', '..', 'assets', 'ollama_icon_16x16Template.png')
if (app.isPackaged) {
@@ -84,7 +85,7 @@ function installCLI() {
.showMessageBox({
type: 'info',
title: 'Ollama CLI installation',
message: 'To make the Ollama command line work in your terminal, it needs administrator privileges.',
message: 'To make the Ollama command work in your terminal, it needs administrator privileges.',
buttons: ['OK'],
})
.then(result => {
@@ -108,6 +109,15 @@ app.on('ready', () => {
if (process.platform === 'darwin') {
app.dock.hide()
if (!store.has('first-time-run')) {
// This is the first run
app.setLoginItemSettings({ openAtLogin: true })
store.set('first-time-run', false)
} else {
// The app has been run before
app.setLoginItemSettings({ openAtLogin: app.getLoginItemSettings().openAtLogin })
}
if (!app.isInApplicationsFolder()) {
const chosen = dialog.showMessageBoxSync({
type: 'question',
@@ -167,6 +177,9 @@ async function heartbeat() {
analytics.track({
anonymousId: id(),
event: 'heartbeat',
properties: {
version: app.getVersion(),
},
})
}
@@ -178,7 +191,7 @@ if (app.isPackaged) {
setInterval(() => {
heartbeat()
autoUpdater.checkForUpdates()
}, 60000)
}, 60 * 60 * 1000)
}
autoUpdater.on('error', e => {
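Note: `60 * 60 * 1000` ms is one hour, so the heartbeat and update check now run hourly (660dee7005) rather than every 60 seconds, and each heartbeat now reports the app version.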

cmd/cmd.go

@@ -9,6 +9,7 @@ import (
"net"
"os"
"path"
"strings"
"time"
"github.com/schollz/progressbar/v3"
@@ -43,27 +44,23 @@ func RunRun(cmd *cobra.Command, args []string) error {
}
func pull(model string) error {
// TODO: check if the local model is up to date with remote
_, err := os.Stat(cacheDir() + "/models/" + model + ".bin")
switch {
case errors.Is(err, os.ErrNotExist):
client := api.NewClient()
var bar *progressbar.ProgressBar
return client.Pull(
context.Background(),
&api.PullRequest{Model: model},
func(progress api.PullProgress) error {
if bar == nil {
bar = progressbar.DefaultBytes(progress.Total)
}
client := api.NewClient()
var bar *progressbar.ProgressBar
return client.Pull(
context.Background(),
&api.PullRequest{Model: model},
func(progress api.PullProgress) error {
if bar == nil && progress.Percent == 100 {
// already downloaded
return nil
}
if bar == nil {
bar = progressbar.DefaultBytes(progress.Total)
}
return bar.Set64(progress.Completed)
},
)
case err != nil:
return err
}
return nil
return bar.Set64(progress.Completed)
},
)
}
func RunGenerate(_ *cobra.Command, args []string) error {
@@ -79,38 +76,41 @@ func RunGenerate(_ *cobra.Command, args []string) error {
}
func generate(model, prompt string) error {
client := api.NewClient()
if len(strings.TrimSpace(prompt)) > 0 {
client := api.NewClient()
spinner := progressbar.NewOptions(-1,
progressbar.OptionSetWriter(os.Stderr),
progressbar.OptionThrottle(60*time.Millisecond),
progressbar.OptionSpinnerType(14),
progressbar.OptionSetRenderBlankState(true),
progressbar.OptionSetElapsedTime(false),
progressbar.OptionClearOnFinish(),
)
spinner := progressbar.NewOptions(-1,
progressbar.OptionSetWriter(os.Stderr),
progressbar.OptionThrottle(60*time.Millisecond),
progressbar.OptionSpinnerType(14),
progressbar.OptionSetRenderBlankState(true),
progressbar.OptionSetElapsedTime(false),
progressbar.OptionClearOnFinish(),
)
go func() {
for range time.Tick(60 * time.Millisecond) {
if spinner.IsFinished() {
break
go func() {
for range time.Tick(60 * time.Millisecond) {
if spinner.IsFinished() {
break
}
spinner.Add(1)
}
}()
client.Generate(context.Background(), &api.GenerateRequest{Model: model, Prompt: prompt}, func(resp api.GenerateResponse) error {
if !spinner.IsFinished() {
spinner.Finish()
}
spinner.Add(1)
}
}()
fmt.Print(resp.Response)
return nil
})
client.Generate(context.Background(), &api.GenerateRequest{Model: model, Prompt: prompt}, func(resp api.GenerateResponse) error {
if !spinner.IsFinished() {
spinner.Finish()
}
fmt.Println()
fmt.Println()
}
fmt.Print(resp.Response)
return nil
})
fmt.Println()
fmt.Println()
return nil
}
@@ -153,7 +153,17 @@ func generateBatch(model string) error {
}
func RunServer(_ *cobra.Command, _ []string) error {
ln, err := net.Listen("tcp", "127.0.0.1:11434")
host := os.Getenv("OLLAMA_HOST")
if host == "" {
host = "127.0.0.1"
}
port := os.Getenv("OLLAMA_PORT")
if port == "" {
port = "11434"
}
ln, err := net.Listen("tcp", fmt.Sprintf("%s:%s", host, port))
if err != nil {
return err
}
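Note: the listen address is now configurable; the defaults remain `127.0.0.1:11434`. A usage sketch with illustrative values:

```
OLLAMA_HOST=0.0.0.0 OLLAMA_PORT=8080 ollama serve
```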

docs/development.md

@@ -1,33 +1,40 @@
# Development
ollama is built using Python 3 and uses [Poetry](https://python-poetry.org/) to manage dependencies and build packages.
Install required tools:
```
pip install poetry
brew install cmake go node
```
Install ollama and its dependencies:
Then run `make`:
```
poetry install --extras server --with dev
make
```
Run ollama server:
Now you can run `ollama`:
```
poetry run ollama server
./ollama
```
Update dependencies:
## Releasing
To release a new version of Ollama you'll need to set some environment variables:
* `GITHUB_TOKEN`: your GitHub token
* `APPLE_IDENTITY`: the Apple signing identity (macOS only)
* `APPLE_ID`: your Apple ID
* `APPLE_PASSWORD`: your Apple ID app-specific password
* `APPLE_TEAM_ID`: the Apple team ID for the signing identity
* `TELEMETRY_WRITE_KEY`: segment write key for telemetry
Then run the publish script with the target version:
```
poetry update --extras server --with dev
poetry lock
poetry export >requirements.txt
VERSION=0.0.2 ./scripts/publish.sh
```
Build binary package:
```
poetry build
```

models.json

@@ -11,18 +11,6 @@
"original_url": "https://huggingface.co/psmathur/orca_mini_3b",
"license": "CC-BY-SA-4.0"
},
{
"name": "replit",
"display_name": "Replit Code",
"parameters": "3B",
"url": "https://huggingface.co/nomic-ai/ggml-replit-code-v1-3b/resolve/main/ggml-replit-code-v1-3b.bin",
"short_description": "Code Completion",
"description": "This model focuses on code completion. The training mixture includes 20 different languages, listed here in descending order of number of tokens: Markdown, Java, JavaScript, Python, TypeScript, PHP, SQL, JSX, reStructuredText, Rust, C, CSS, Go, C++, HTML, Vue, Ruby, Jupyter Notebook, R, and Shell. This model binary is converted by Nomic AI with the original Replit model code before it was refactored to use MPT configurations.",
"published_by": "Nomic AI",
"original_author": "Replit, Inc.",
"original_url": "https://huggingface.co/replit/replit-code-v1-3b",
"license": "CC-BY-SA-4.0"
},
{
"name": "nous-hermes",
"display_name": "Nous Hermes",
@@ -47,4 +35,4 @@
"original_url": "https://huggingface.co/ehartford/Wizard-Vicuna-13B-Uncensored",
"license:": "GPL"
}
]
]

scripts/publish.sh (new executable file, 33 lines)

@@ -0,0 +1,33 @@
# Set your variables here.
REPO="jmorganca/ollama"
# Check if VERSION is set
if [[ -z "${VERSION}" ]]; then
echo "VERSION is not set. Please set the VERSION environment variable."
exit 1
fi
OS=$(go env GOOS)
ARCH=$(go env GOARCH)
make app
# Create a new tag if it doesn't exist.
if ! git rev-parse v$VERSION >/dev/null 2>&1; then
git tag v$VERSION
git push origin v$VERSION
fi
mkdir dist
cp app/out/make/zip/${OS}/${ARCH}/Ollama-${OS}-${ARCH}-${VERSION}.zip dist/Ollama-${OS}-${ARCH}.zip
cp ./ollama dist/ollama-${OS}-${ARCH}
# Create a new release.
gh release create v$VERSION
# Upload the zip file.
gh release upload v$VERSION ./dist/Ollama-${OS}-${ARCH}.zip
# Upload the binary.
gh release upload v$VERSION ./dist/ollama-${OS}-${ARCH}
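A sketch of invoking the script, assuming the environment variables listed in docs/development.md are exported (the token value is a placeholder; `gh` reads `GITHUB_TOKEN` for authentication):

```
export GITHUB_TOKEN=<your-token>
VERSION=0.0.2 ./scripts/publish.sh
```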

(file name not captured; server package source defining directoryURL)

@@ -12,9 +12,7 @@ import (
"github.com/jmorganca/ollama/api"
)
// const directoryURL = "https://ollama.ai/api/models"
// TODO
const directoryURL = "https://raw.githubusercontent.com/jmorganca/ollama/go/models.json"
const directoryURL = "https://ollama.ai/api/models"
type Model struct {
Name string `json:"name"`

server/routes.go

@@ -3,12 +3,14 @@ package server
import (
"embed"
"encoding/json"
"errors"
"fmt"
"io"
"log"
"math"
"net"
"net/http"
"os"
"path"
"runtime"
"strings"
@@ -25,6 +27,15 @@ import (
var templatesFS embed.FS
var templates = template.Must(template.ParseFS(templatesFS, "templates/*.prompt"))
func cacheDir() string {
home, err := os.UserHomeDir()
if err != nil {
panic(err)
}
return path.Join(home, ".ollama")
}
func generate(c *gin.Context) {
var req api.GenerateRequest
req.ModelOptions = api.DefaultModelOptions
@@ -37,9 +48,16 @@ func generate(c *gin.Context) {
if remoteModel, _ := getRemote(req.Model); remoteModel != nil {
req.Model = remoteModel.FullName()
}
if _, err := os.Stat(req.Model); err != nil {
if !errors.Is(err, os.ErrNotExist) {
c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
return
}
req.Model = path.Join(cacheDir(), "models", req.Model+".bin")
}
modelOpts := getModelOpts(req)
modelOpts.NGPULayers = 1 // hard-code this for now
modelOpts.NGPULayers = 1 // hard-code this for now
model, err := llama.New(req.Model, modelOpts)
if err != nil {
@@ -107,6 +125,10 @@ func generate(c *gin.Context) {
func Serve(ln net.Listener) error {
r := gin.Default()
r.GET("/", func(c *gin.Context) {
c.String(http.StatusOK, "Ollama is running")
})
r.POST("api/pull", func(c *gin.Context) {
var req api.PullRequest
if err := c.ShouldBindJSON(&req); err != nil {
@@ -118,6 +140,17 @@ func Serve(ln net.Listener) error {
go func() {
defer close(progressCh)
if err := pull(req.Model, progressCh); err != nil {
var opError *net.OpError
if errors.As(err, &opError) {
result := api.PullProgress{
Error: api.Error{
Code: http.StatusBadGateway,
Message: "failed to get models from directory",
},
}
c.JSON(http.StatusBadGateway, result)
return
}
c.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
return
}
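Note: the new `/` route doubles as a liveness check. Against a server on the default address:

```
curl http://127.0.0.1:11434/
# Ollama is running
```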

(file name not captured; a server/templates/*.prompt template)

@@ -1,2 +1,3 @@
{{ .Prompt }}
### Response:

web/app/download/page.tsx (new file, 20 lines)

@@ -0,0 +1,20 @@
import { Octokit } from '@octokit/rest'
import { redirect } from 'next/navigation'
const octokit = new Octokit()
export default async function Download() {
const { data } = await octokit.repos.getLatestRelease({
owner: 'jmorganca',
repo: 'ollama',
})
// todo: get the correct asset for the current arch/os
const asset = data.assets.find(a => a.name.toLowerCase().includes('darwin') && a.name.toLowerCase().includes('.zip'))
if (asset) {
redirect(asset.browser_download_url)
}
return null
}
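Note: this page asks GitHub (via an unauthenticated Octokit client) for the latest release and redirects to its macOS zip asset. It is the target behind the README's new Download link and the home page's download button, and `target='_blank'` on that button is the "open download page in new tab" change (b245f5af8f).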

web/app/icon.png (new binary file; PNG, 1.8 KiB)

web/app/icon.svg (deleted; SVG, 1.0 KiB)

@@ -1,10 +0,0 @@
<svg width="32" height="32" viewBox="0 0 32 32" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_202_1460)">
<path d="M5.21875 16.125C5.21875 13.0156 6.20312 10.3906 8.17188 8.25C10.1562 6.09375 12.7031 5.01562 15.8125 5.01562C18.9062 5.01562 21.4688 6.03125 23.5 8.0625C25.5312 10.0938 26.5469 12.7969 26.5469 16.1719C26.5469 19.2812 25.5625 21.9844 23.5938 24.2812C21.625 26.5781 19.0781 27.7266 15.9531 27.7266C12.9531 27.7266 10.4141 26.625 8.33594 24.4219C6.25781 22.2188 5.21875 19.4531 5.21875 16.125ZM15.1562 6.42188C13.9219 6.42188 12.8516 6.82812 11.9453 7.64062C10.3828 9.0625 9.60156 11.5156 9.60156 15C9.60156 17.7812 10.2266 20.375 11.4766 22.7812C12.7422 25.1875 14.4922 26.3906 16.7266 26.3906C18.4766 26.3906 19.8203 25.5859 20.7578 23.9766C21.7109 22.3672 22.1875 20.2578 22.1875 17.6484C22.1875 14.9453 21.5859 12.3984 20.3828 10.0078C19.1797 7.61719 17.4375 6.42188 15.1562 6.42188Z" fill="black"/>
</g>
<defs>
<clipPath id="clip0_202_1460">
<rect width="32" height="32" fill="white"/>
</clipPath>
</defs>
</svg>


web/app/page.tsx

@@ -1,22 +1,25 @@
import { AiFillApple } from 'react-icons/ai'
import models from '../../models.json'
export default async function Home() {
return (
<main className='flex min-h-screen max-w-2xl flex-col p-4 lg:p-24'>
<h1 className='font-serif text-3xl'>ollama</h1>
<section className='my-8'>
<p className='my-3 mb-8 max-w-md'>
<img src='/ollama.png' className='w-20 h-auto' />
<section className='my-4'>
<p className='my-3 max-w-md'>
<a className='underline' href='https://github.com/jmorganca/ollama'>
Ollama
</a>{' '}
is a tool for running large language models.
is a tool for running large language models, currently for macOS with Windows and Linux coming soon.
<br />
<br />
Get started with Ollama using pip:
<a href='/download' target='_blank'>
<button className='bg-black text-white text-sm py-2 px-3 rounded-lg flex items-center gap-2'>
<AiFillApple className='h-auto w-5 relative -top-px' /> Download for macOS
</button>
</a>
</p>
<pre className='my-4'>
<code>pip install ollama</code>
</pre>
</section>
<section className='my-4'>
<h2 className='mb-4 text-lg'>Example models you can try running:</h2>

web/package-lock.json (generated, 68 lines changed)

@@ -1,14 +1,15 @@
{
"name": "web",
"version": "0.1.0",
"version": "0.0.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "web",
"version": "0.1.0",
"version": "0.0.0",
"dependencies": {
"@octokit/rest": "^19.0.13",
"@octokit/types": "^11.0.0",
"@types/node": "20.4.0",
"@types/react": "18.2.14",
"@types/react-dom": "18.2.6",
@@ -20,6 +21,7 @@
"postcss": "8.4.24",
"react": "18.2.0",
"react-dom": "18.2.0",
"react-icons": "^4.10.1",
"semver": "^7.5.3",
"tailwindcss": "3.3.2",
"typescript": "5.1.6"
@@ -393,6 +395,14 @@
"node": ">= 14"
}
},
"node_modules/@octokit/core/node_modules/@octokit/types": {
"version": "9.3.2",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-9.3.2.tgz",
"integrity": "sha512-D4iHGTdAnEEVsB8fl95m1hiz7D5YiRdQ9b/OEb3BYRVwbLsGHcRVPz+u+BgRLNk0Q0/4iZCBqDN96j2XNxfXrA==",
"dependencies": {
"@octokit/openapi-types": "^18.0.0"
}
},
"node_modules/@octokit/endpoint": {
"version": "7.0.6",
"resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-7.0.6.tgz",
@@ -406,6 +416,14 @@
"node": ">= 14"
}
},
"node_modules/@octokit/endpoint/node_modules/@octokit/types": {
"version": "9.3.2",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-9.3.2.tgz",
"integrity": "sha512-D4iHGTdAnEEVsB8fl95m1hiz7D5YiRdQ9b/OEb3BYRVwbLsGHcRVPz+u+BgRLNk0Q0/4iZCBqDN96j2XNxfXrA==",
"dependencies": {
"@octokit/openapi-types": "^18.0.0"
}
},
"node_modules/@octokit/graphql": {
"version": "5.0.6",
"resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-5.0.6.tgz",
@@ -419,6 +437,14 @@
"node": ">= 14"
}
},
"node_modules/@octokit/graphql/node_modules/@octokit/types": {
"version": "9.3.2",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-9.3.2.tgz",
"integrity": "sha512-D4iHGTdAnEEVsB8fl95m1hiz7D5YiRdQ9b/OEb3BYRVwbLsGHcRVPz+u+BgRLNk0Q0/4iZCBqDN96j2XNxfXrA==",
"dependencies": {
"@octokit/openapi-types": "^18.0.0"
}
},
"node_modules/@octokit/openapi-types": {
"version": "18.0.0",
"resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-18.0.0.tgz",
@@ -439,6 +465,14 @@
"@octokit/core": ">=4"
}
},
"node_modules/@octokit/plugin-paginate-rest/node_modules/@octokit/types": {
"version": "9.3.2",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-9.3.2.tgz",
"integrity": "sha512-D4iHGTdAnEEVsB8fl95m1hiz7D5YiRdQ9b/OEb3BYRVwbLsGHcRVPz+u+BgRLNk0Q0/4iZCBqDN96j2XNxfXrA==",
"dependencies": {
"@octokit/openapi-types": "^18.0.0"
}
},
"node_modules/@octokit/plugin-request-log": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/@octokit/plugin-request-log/-/plugin-request-log-1.0.4.tgz",
@@ -498,6 +532,22 @@
"node": ">= 14"
}
},
"node_modules/@octokit/request-error/node_modules/@octokit/types": {
"version": "9.3.2",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-9.3.2.tgz",
"integrity": "sha512-D4iHGTdAnEEVsB8fl95m1hiz7D5YiRdQ9b/OEb3BYRVwbLsGHcRVPz+u+BgRLNk0Q0/4iZCBqDN96j2XNxfXrA==",
"dependencies": {
"@octokit/openapi-types": "^18.0.0"
}
},
"node_modules/@octokit/request/node_modules/@octokit/types": {
"version": "9.3.2",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-9.3.2.tgz",
"integrity": "sha512-D4iHGTdAnEEVsB8fl95m1hiz7D5YiRdQ9b/OEb3BYRVwbLsGHcRVPz+u+BgRLNk0Q0/4iZCBqDN96j2XNxfXrA==",
"dependencies": {
"@octokit/openapi-types": "^18.0.0"
}
},
"node_modules/@octokit/rest": {
"version": "19.0.13",
"resolved": "https://registry.npmjs.org/@octokit/rest/-/rest-19.0.13.tgz",
@@ -518,9 +568,9 @@
"integrity": "sha512-I0vDR0rdtP8p2lGMzvsJzbhdOWy405HcGovrspJ8RRibHnyRgggUSNO5AIox5LmqiwmatHKYsvj6VGFHkqS7lA=="
},
"node_modules/@octokit/types": {
"version": "9.3.2",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-9.3.2.tgz",
"integrity": "sha512-D4iHGTdAnEEVsB8fl95m1hiz7D5YiRdQ9b/OEb3BYRVwbLsGHcRVPz+u+BgRLNk0Q0/4iZCBqDN96j2XNxfXrA==",
"version": "11.0.0",
"resolved": "https://registry.npmjs.org/@octokit/types/-/types-11.0.0.tgz",
"integrity": "sha512-h4iyfMpQUdub1itwTn6y7z2a3EtPuer1paKfsIbZErv0LBbZYGq6haiPUPJys/LetPqgcX3ft33O16XuS03Anw==",
"dependencies": {
"@octokit/openapi-types": "^18.0.0"
}
@@ -3531,6 +3581,14 @@
"react": "^18.2.0"
}
},
"node_modules/react-icons": {
"version": "4.10.1",
"resolved": "https://registry.npmjs.org/react-icons/-/react-icons-4.10.1.tgz",
"integrity": "sha512-/ngzDP/77tlCfqthiiGNZeYFACw85fUjZtLbedmJ5DTlNDIwETxhwBzdOJ21zj4iJdvc0J3y7yOsX3PpxAJzrw==",
"peerDependencies": {
"react": "*"
}
},
"node_modules/react-is": {
"version": "16.13.1",
"resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz",

web/package.json

@@ -9,6 +9,7 @@
},
"dependencies": {
"@octokit/rest": "^19.0.13",
"@octokit/types": "^11.0.0",
"@types/node": "20.4.0",
"@types/react": "18.2.14",
"@types/react-dom": "18.2.6",
@@ -20,6 +21,7 @@
"postcss": "8.4.24",
"react": "18.2.0",
"react-dom": "18.2.0",
"react-icons": "^4.10.1",
"semver": "^7.5.3",
"tailwindcss": "3.3.2",
"typescript": "5.1.6"

web/public/next.svg (deleted; SVG, 1.3 KiB)

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 394 80"><path fill="#000" d="M262 0h68.5v12.7h-27.2v66.6h-13.6V12.7H262V0ZM149 0v12.7H94v20.4h44.3v12.6H94v21h55v12.6H80.5V0h68.7zm34.3 0h-17.8l63.8 79.4h17.9l-32-39.7 32-39.6h-17.9l-23 28.6-23-28.6zm18.3 56.7-9-11-27.1 33.7h17.8l18.3-22.7z"/><path fill="#000" d="M81 79.3 17 0H0v79.3h13.6V17l50.2 62.3H81Zm252.6-.4c-1 0-1.8-.4-2.5-1s-1.1-1.6-1.1-2.6.3-1.8 1-2.5 1.6-1 2.6-1 1.8.3 2.5 1a3.4 3.4 0 0 1 .6 4.3 3.7 3.7 0 0 1-3 1.8zm23.2-33.5h6v23.3c0 2.1-.4 4-1.3 5.5a9.1 9.1 0 0 1-3.8 3.5c-1.6.8-3.5 1.3-5.7 1.3-2 0-3.7-.4-5.3-1s-2.8-1.8-3.7-3.2c-.9-1.3-1.4-3-1.4-5h6c.1.8.3 1.6.7 2.2s1 1.2 1.6 1.5c.7.4 1.5.5 2.4.5 1 0 1.8-.2 2.4-.6a4 4 0 0 0 1.6-1.8c.3-.8.5-1.8.5-3V45.5zm30.9 9.1a4.4 4.4 0 0 0-2-3.3 7.5 7.5 0 0 0-4.3-1.1c-1.3 0-2.4.2-3.3.5-.9.4-1.6 1-2 1.6a3.5 3.5 0 0 0-.3 4c.3.5.7.9 1.3 1.2l1.8 1 2 .5 3.2.8c1.3.3 2.5.7 3.7 1.2a13 13 0 0 1 3.2 1.8 8.1 8.1 0 0 1 3 6.5c0 2-.5 3.7-1.5 5.1a10 10 0 0 1-4.4 3.5c-1.8.8-4.1 1.2-6.8 1.2-2.6 0-4.9-.4-6.8-1.2-2-.8-3.4-2-4.5-3.5a10 10 0 0 1-1.7-5.6h6a5 5 0 0 0 3.5 4.6c1 .4 2.2.6 3.4.6 1.3 0 2.5-.2 3.5-.6 1-.4 1.8-1 2.4-1.7a4 4 0 0 0 .8-2.4c0-.9-.2-1.6-.7-2.2a11 11 0 0 0-2.1-1.4l-3.2-1-3.8-1c-2.8-.7-5-1.7-6.6-3.2a7.2 7.2 0 0 1-2.4-5.7 8 8 0 0 1 1.7-5 10 10 0 0 1 4.3-3.5c2-.8 4-1.2 6.4-1.2 2.3 0 4.4.4 6.2 1.2 1.8.8 3.2 2 4.3 3.4 1 1.4 1.5 3 1.5 5h-5.8z"/></svg>


web/public/ollama.png (new binary file; PNG, 7.3 KiB)

web/public/vercel.svg (deleted; SVG, 629 B)

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 283 64"><path fill="black" d="M141 16c-11 0-19 7-19 18s9 18 20 18c7 0 13-3 16-7l-7-5c-2 3-6 4-9 4-5 0-9-3-10-7h28v-3c0-11-8-18-19-18zm-9 15c1-4 4-7 9-7s8 3 9 7h-18zm117-15c-11 0-19 7-19 18s9 18 20 18c6 0 12-3 16-7l-8-5c-2 3-5 4-8 4-5 0-9-3-11-7h28l1-3c0-11-8-18-19-18zm-10 15c2-4 5-7 10-7s8 3 9 7h-19zm-39 3c0 6 4 10 10 10 4 0 7-2 9-5l8 5c-3 5-9 8-17 8-11 0-19-7-19-18s8-18 19-18c8 0 14 3 17 8l-8 5c-2-3-5-5-9-5-6 0-10 4-10 10zm83-29v46h-9V5h9zM37 0l37 64H0L37 0zm92 5-27 48L74 5h10l18 30 17-30h10zm59 12v10l-3-1c-6 0-10 4-10 10v15h-9V17h9v9c0-5 6-9 13-9z"/></svg>
