Compare commits

Comparing language_s...scratch: 1 commit (2789ed31a7)

@@ -1,9 +1,8 @@
.vscode
ollama
app
macapp
dist
llm/llama.cpp
.env
.cache
test_data
test_data

.gitattributes (vendored): 1 changed line
@@ -1 +0,0 @@
llm/ext_server/* linguist-vendored

.github/ISSUE_TEMPLATE/10_bug_report.yml (vendored): 60 changed lines
@@ -1,60 +0,0 @@
name: Bug report
labels: [bug]
description: Something isn't working right.
body:
  - type: textarea
    id: description
    attributes:
      label: What is the issue?
      description: What happened? What did you expect to happen?
    validations:
      required: true
  - type: dropdown
    id: os
    attributes:
      label: OS
      description: Which operating system are you using?
      multiple: true
      options:
        - Linux
        - macOS
        - Windows
        - Docker
        - WSL2
    validations:
      required: false
  - type: dropdown
    id: gpu
    attributes:
      label: GPU
      description: Which GPU are you using?
      multiple: true
      options:
        - Nvidia
        - AMD
        - Intel
        - Apple
        - Other
    validations:
      required: false
  - type: dropdown
    id: cpu
    attributes:
      label: CPU
      description: Which CPU are you using?
      multiple: true
      options:
        - Intel
        - AMD
        - Apple
        - Other
    validations:
      required: false
  - type: input
    id: version
    attributes:
      label: Ollama version
      description: What version of Ollama are you using? (`ollama --version`)
      placeholder: e.g., 0.1.32
    validations:
      required: false

.github/ISSUE_TEMPLATE/20_feature_request.md (vendored): 6 changed lines
@@ -1,6 +0,0 @@
---
name: Feature request
about: Request a new feature
labels: feature request
---

.github/ISSUE_TEMPLATE/30_model_request.md (vendored): 5 changed lines
@@ -1,5 +0,0 @@
---
name: Model request
about: Request support for a new model to be added to Ollama
labels: model request
---

.github/ISSUE_TEMPLATE/config.yml (vendored): 8 changed lines
@@ -1,8 +0,0 @@
blank_issues_enabled: true
contact_links:
  - name: Help
    url: https://discord.com/invite/ollama
    about: Please join our Discord server for help using Ollama
  - name: Troubleshooting
    url: https://github.com/ollama/ollama/blob/main/docs/faq.md#faq
    about: See the FAQ for common issues and solutions

.github/workflows/latest.yaml (vendored): 24 changed lines
@@ -1,24 +0,0 @@
name: latest

on:
  release:
    types: [released]

jobs:
  update-latest:
    environment: release
    runs-on: linux
    steps:
      - uses: actions/checkout@v4
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ vars.DOCKER_USER }}
          password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
      - name: Tag images as latest
        env:
          PUSH: "1"
        shell: bash
        run: |
          export "VERSION=${GITHUB_REF_NAME#v}"
          ./scripts/tag_latest.sh
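
The tag-latest step strips the leading "v" from the pushed tag and hands the result to scripts/tag_latest.sh, whose contents are not part of this diff. A hypothetical sketch of what a tag-latest script of this shape typically does, using the ollama/ollama image name from the README below:

```
VERSION=${GITHUB_REF_NAME#v}                       # e.g. v0.1.32 -> 0.1.32
docker pull "ollama/ollama:${VERSION}"             # fetch the versioned image
docker tag "ollama/ollama:${VERSION}" ollama/ollama:latest
docker push ollama/ollama:latest                   # assumes PUSH=1 semantics
```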

.github/workflows/release.yaml (vendored): 474 changed lines
@@ -1,474 +0,0 @@
name: release

on:
  push:
    tags:
      - 'v*'

jobs:
  # Full build of the Mac assets
  build-darwin:
    runs-on: macos-12
    environment: release
    steps:
      - uses: actions/checkout@v4
      - name: Set Version
        shell: bash
        run: |
          echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_ENV
          echo "RELEASE_VERSION=$(echo ${GITHUB_REF_NAME} | cut -f1 -d-)" >> $GITHUB_ENV
      - name: key
        env:
          MACOS_SIGNING_KEY: ${{ secrets.MACOS_SIGNING_KEY }}
          MACOS_SIGNING_KEY_PASSWORD: ${{ secrets.MACOS_SIGNING_KEY_PASSWORD }}
        run: |
          echo $MACOS_SIGNING_KEY | base64 --decode > certificate.p12
          security create-keychain -p password build.keychain
          security default-keychain -s build.keychain
          security unlock-keychain -p password build.keychain
          security import certificate.p12 -k build.keychain -P $MACOS_SIGNING_KEY_PASSWORD -T /usr/bin/codesign
          security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k password build.keychain
          security set-keychain-settings -lut 3600 build.keychain
      - uses: actions/setup-go@v5
        with:
          go-version-file: go.mod
          cache: true
      - name: Build Darwin
        env:
          APPLE_IDENTITY: ${{ secrets.APPLE_IDENTITY }}
          APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
          APPLE_TEAM_ID: ${{ vars.APPLE_TEAM_ID }}
          APPLE_ID: ${{ vars.APPLE_ID }}
          SDKROOT: /Applications/Xcode_13.4.1.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk
          DEVELOPER_DIR: /Applications/Xcode_13.4.1.app/Contents/Developer
        run: |
          ./scripts/build_darwin.sh

      - uses: actions/upload-artifact@v4
        with:
          name: dist-darwin
          path: |
            dist/*arwin*
            !dist/*-cov

  # Windows builds take a long time to both install the dependencies and build, so parallelize
  # CPU generation step
  generate-windows-cpu:
    environment: release
    runs-on: windows
    env:
      KEY_CONTAINER: ${{ vars.KEY_CONTAINER }}
    steps:
      - uses: actions/checkout@v4
      - name: Set Version
        shell: bash
        run: echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_ENV
      - uses: 'google-github-actions/auth@v2'
        with:
          project_id: 'ollama'
          credentials_json: '${{ secrets.GOOGLE_SIGNING_CREDENTIALS }}'
      - run: echo "${{ vars.OLLAMA_CERT }}" > ollama_inc.crt
      - name: install Windows SDK 8.1 to get signtool
        run: |
          $ErrorActionPreference = "Stop"
          write-host "downloading SDK"
          Invoke-WebRequest -Uri "https://go.microsoft.com/fwlink/p/?LinkId=323507" -OutFile "${env:RUNNER_TEMP}\sdksetup.exe"
          Start-Process "${env:RUNNER_TEMP}\sdksetup.exe" -ArgumentList @("/q") -NoNewWindow -Wait
          write-host "Win SDK 8.1 installed"
          gci -path 'C:\Program Files (x86)\Windows Kits\' -r -fi 'signtool.exe'
      - name: install signing plugin
        run: |
          $ErrorActionPreference = "Stop"
          write-host "downloading plugin"
          Invoke-WebRequest -Uri "https://github.com/GoogleCloudPlatform/kms-integrations/releases/download/cng-v1.0/kmscng-1.0-windows-amd64.zip" -OutFile "${env:RUNNER_TEMP}\plugin.zip"
          Expand-Archive -Path "${env:RUNNER_TEMP}\plugin.zip" -DestinationPath ${env:RUNNER_TEMP}\plugin\
          write-host "Installing plugin"
          & "${env:RUNNER_TEMP}\plugin\*\kmscng.msi" /quiet
          write-host "plugin installed"
      - uses: actions/setup-go@v5
        with:
          go-version-file: go.mod
          cache: true
      - run: go get ./...
      - run: |
          $gopath=(get-command go).source | split-path -parent
          & "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\Common7\Tools\Launch-VsDevShell.ps1"
          cd $env:GITHUB_WORKSPACE
          $env:CMAKE_SYSTEM_VERSION="10.0.22621.0"
          $env:PATH="$gopath;$env:PATH"
          go generate -x ./...
        name: go generate
      - uses: actions/upload-artifact@v4
        with:
          name: generate-windows-cpu
          path: |
            llm/build/**/bin/*
            llm/build/**/*.a
            dist/windows-amd64/**

  # ROCm generation step
  generate-windows-rocm:
    environment: release
    runs-on: windows
    env:
      KEY_CONTAINER: ${{ vars.KEY_CONTAINER }}
    steps:
      - uses: actions/checkout@v4
      - name: Set Version
        shell: bash
        run: echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_ENV
      - uses: 'google-github-actions/auth@v2'
        with:
          project_id: 'ollama'
          credentials_json: '${{ secrets.GOOGLE_SIGNING_CREDENTIALS }}'
      - run: echo "${{ vars.OLLAMA_CERT }}" > ollama_inc.crt
      - name: install Windows SDK 8.1 to get signtool
        run: |
          $ErrorActionPreference = "Stop"
          write-host "downloading SDK"
          Invoke-WebRequest -Uri "https://go.microsoft.com/fwlink/p/?LinkId=323507" -OutFile "${env:RUNNER_TEMP}\sdksetup.exe"
          Start-Process "${env:RUNNER_TEMP}\sdksetup.exe" -ArgumentList @("/q") -NoNewWindow -Wait
          write-host "Win SDK 8.1 installed"
          gci -path 'C:\Program Files (x86)\Windows Kits\' -r -fi 'signtool.exe'
      - name: install signing plugin
        run: |
          $ErrorActionPreference = "Stop"
          write-host "downloading plugin"
          Invoke-WebRequest -Uri "https://github.com/GoogleCloudPlatform/kms-integrations/releases/download/cng-v1.0/kmscng-1.0-windows-amd64.zip" -OutFile "${env:RUNNER_TEMP}\plugin.zip"
          Expand-Archive -Path "${env:RUNNER_TEMP}\plugin.zip" -DestinationPath ${env:RUNNER_TEMP}\plugin\
          write-host "Installing plugin"
          & "${env:RUNNER_TEMP}\plugin\*\kmscng.msi" /quiet
          write-host "plugin installed"
      - uses: actions/setup-go@v5
        with:
          go-version-file: go.mod
          cache: true
      - name: 'Install ROCm'
        run: |
          $ErrorActionPreference = "Stop"
          write-host "downloading AMD HIP Installer"
          Invoke-WebRequest -Uri "https://download.amd.com/developer/eula/rocm-hub/AMD-Software-PRO-Edition-23.Q4-WinSvr2022-For-HIP.exe" -OutFile "${env:RUNNER_TEMP}\rocm-install.exe"
          write-host "Installing AMD HIP"
          Start-Process "${env:RUNNER_TEMP}\rocm-install.exe" -ArgumentList '-install' -NoNewWindow -Wait
          write-host "Completed AMD HIP"
      - name: 'Verify ROCm'
        run: |
          & 'C:\Program Files\AMD\ROCm\*\bin\clang.exe' --version
      - run: go get ./...
      - run: |
          $gopath=(get-command go).source | split-path -parent
          & "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\Common7\Tools\Launch-VsDevShell.ps1"
          cd $env:GITHUB_WORKSPACE
          $env:CMAKE_SYSTEM_VERSION="10.0.22621.0"
          $env:PATH="$gopath;$env:PATH"
          $env:OLLAMA_SKIP_CPU_GENERATE="1"
          $env:HIP_PATH=$(Resolve-Path 'C:\Program Files\AMD\ROCm\*\bin\clang.exe' | split-path | split-path)
          go generate -x ./...
        name: go generate
      - name: 'gather rocm dependencies'
        run: |
          $HIP_PATH=$(Resolve-Path 'C:\Program Files\AMD\ROCm\*\bin\clang.exe' | split-path | split-path)
          md "dist\deps\bin\rocblas\library"
          cp "${HIP_PATH}\bin\hipblas.dll" "dist\deps\bin\"
          cp "${HIP_PATH}\bin\rocblas.dll" "dist\deps\bin\"
          cp "${HIP_PATH}\bin\rocblas\library\*" "dist\deps\bin\rocblas\library\"
      - uses: actions/upload-artifact@v4
        with:
          name: generate-windows-rocm
          path: |
            llm/build/**/bin/*
            dist/windows-amd64/**
      - uses: actions/upload-artifact@v4
        with:
          name: windows-rocm-deps
          path: dist/deps/*

  # CUDA generation step
  generate-windows-cuda:
    environment: release
    runs-on: windows
    env:
      KEY_CONTAINER: ${{ vars.KEY_CONTAINER }}
    steps:
      - uses: actions/checkout@v4
      - name: Set Version
        shell: bash
        run: echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_ENV
      - uses: 'google-github-actions/auth@v2'
        with:
          project_id: 'ollama'
          credentials_json: '${{ secrets.GOOGLE_SIGNING_CREDENTIALS }}'
      - run: echo "${{ vars.OLLAMA_CERT }}" > ollama_inc.crt
      - name: install Windows SDK 8.1 to get signtool
        run: |
          $ErrorActionPreference = "Stop"
          write-host "downloading SDK"
          Invoke-WebRequest -Uri "https://go.microsoft.com/fwlink/p/?LinkId=323507" -OutFile "${env:RUNNER_TEMP}\sdksetup.exe"
          Start-Process "${env:RUNNER_TEMP}\sdksetup.exe" -ArgumentList @("/q") -NoNewWindow -Wait
          write-host "Win SDK 8.1 installed"
          gci -path 'C:\Program Files (x86)\Windows Kits\' -r -fi 'signtool.exe'
      - name: install signing plugin
        run: |
          $ErrorActionPreference = "Stop"
          write-host "downloading plugin"
          Invoke-WebRequest -Uri "https://github.com/GoogleCloudPlatform/kms-integrations/releases/download/cng-v1.0/kmscng-1.0-windows-amd64.zip" -OutFile "${env:RUNNER_TEMP}\plugin.zip"
          Expand-Archive -Path "${env:RUNNER_TEMP}\plugin.zip" -DestinationPath ${env:RUNNER_TEMP}\plugin\
          write-host "Installing plugin"
          & "${env:RUNNER_TEMP}\plugin\*\kmscng.msi" /quiet
          write-host "plugin installed"
      - uses: actions/setup-go@v5
        with:
          go-version-file: go.mod
          cache: true
      - name: 'Install CUDA'
        run: |
          $ErrorActionPreference = "Stop"
          write-host "downloading CUDA Installer"
          Invoke-WebRequest -Uri "https://developer.download.nvidia.com/compute/cuda/11.3.1/local_installers/cuda_11.3.1_465.89_win10.exe" -OutFile "${env:RUNNER_TEMP}\cuda-install.exe"
          write-host "Installing CUDA"
          Start-Process "${env:RUNNER_TEMP}\cuda-install.exe" -ArgumentList '-s' -NoNewWindow -Wait
          write-host "Completed CUDA"
          $cudaPath=((resolve-path "c:\Program Files\NVIDIA*\CUDA\v*\bin\nvcc.exe")[0].path | split-path | split-path)
          $cudaVer=($cudaPath | split-path -leaf ) -replace 'v(\d+).(\d+)', '$1_$2'
          echo "$cudaPath\bin" >> $env:GITHUB_PATH
          echo "CUDA_PATH=$cudaPath" >> $env:GITHUB_ENV
          echo "CUDA_PATH_V${cudaVer}=$cudaPath" >> $env:GITHUB_ENV
          echo "CUDA_PATH_VX_Y=CUDA_PATH_V${cudaVer}" >> $env:GITHUB_ENV
      - name: 'Verify CUDA'
        run: nvcc -V
      - run: go get ./...
      - name: go generate
        run: |
          $gopath=(get-command go).source | split-path -parent
          $cudabin=(get-command nvcc).source | split-path
          & "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\Common7\Tools\Launch-VsDevShell.ps1"
          cd $env:GITHUB_WORKSPACE
          $env:CMAKE_SYSTEM_VERSION="10.0.22621.0"
          $env:PATH="$gopath;$cudabin;$env:PATH"
          $env:OLLAMA_SKIP_CPU_GENERATE="1"
          go generate -x ./...
      - name: 'gather cuda dependencies'
        run: |
          $NVIDIA_DIR=(resolve-path 'C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\*\bin\')[0]
          md "dist\deps"
          cp "${NVIDIA_DIR}\cudart64_*.dll" "dist\deps\"
          cp "${NVIDIA_DIR}\cublas64_*.dll" "dist\deps\"
          cp "${NVIDIA_DIR}\cublasLt64_*.dll" "dist\deps\"
      - uses: actions/upload-artifact@v4
        with:
          name: generate-windows-cuda
          path: |
            llm/build/**/bin/*
            dist/windows-amd64/**
      - uses: actions/upload-artifact@v4
        with:
          name: windows-cuda-deps
          path: dist/deps/*

  # Import the prior generation steps and build the final windows assets
  build-windows:
    environment: release
    runs-on: windows
    needs:
      - generate-windows-cuda
      - generate-windows-rocm
      - generate-windows-cpu
    env:
      KEY_CONTAINER: ${{ vars.KEY_CONTAINER }}
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: recursive
      - name: Set Version
        shell: bash
        run: echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_ENV
      - uses: 'google-github-actions/auth@v2'
        with:
          project_id: 'ollama'
          credentials_json: '${{ secrets.GOOGLE_SIGNING_CREDENTIALS }}'
      - run: echo "${{ vars.OLLAMA_CERT }}" > ollama_inc.crt
      - name: install Windows SDK 8.1 to get signtool
        run: |
          $ErrorActionPreference = "Stop"
          write-host "downloading SDK"
          Invoke-WebRequest -Uri "https://go.microsoft.com/fwlink/p/?LinkId=323507" -OutFile "${env:RUNNER_TEMP}\sdksetup.exe"
          Start-Process "${env:RUNNER_TEMP}\sdksetup.exe" -ArgumentList @("/q") -NoNewWindow -Wait
          write-host "Win SDK 8.1 installed"
          gci -path 'C:\Program Files (x86)\Windows Kits\' -r -fi 'signtool.exe'
      - name: install signing plugin
        run: |
          $ErrorActionPreference = "Stop"
          write-host "downloading plugin"
          Invoke-WebRequest -Uri "https://github.com/GoogleCloudPlatform/kms-integrations/releases/download/cng-v1.0/kmscng-1.0-windows-amd64.zip" -OutFile "${env:RUNNER_TEMP}\plugin.zip"
          Expand-Archive -Path "${env:RUNNER_TEMP}\plugin.zip" -DestinationPath ${env:RUNNER_TEMP}\plugin\
          write-host "Installing plugin"
          & "${env:RUNNER_TEMP}\plugin\*\kmscng.msi" /quiet
          write-host "plugin installed"
      - uses: actions/setup-go@v5
        with:
          go-version-file: go.mod
          cache: true
      - run: go get
      - uses: actions/download-artifact@v4
        with:
          name: generate-windows-cpu
      - uses: actions/download-artifact@v4
        with:
          name: generate-windows-cuda
      - uses: actions/download-artifact@v4
        with:
          name: windows-cuda-deps
      - uses: actions/download-artifact@v4
        with:
          name: windows-rocm-deps
      - uses: actions/download-artifact@v4
        with:
          name: generate-windows-rocm
      - run: dir llm/build
      - run: |
          $gopath=(get-command go).source | split-path -parent
          & "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\Common7\Tools\Launch-VsDevShell.ps1"
          cd $env:GITHUB_WORKSPACE
          $env:CMAKE_SYSTEM_VERSION="10.0.22621.0"
          $env:PATH="$gopath;$env:PATH"
          $env:OLLAMA_SKIP_GENERATE="1"
          & .\scripts\build_windows.ps1
      - uses: actions/upload-artifact@v4
        with:
          name: dist-windows
          path: |
            dist/OllamaSetup.exe
            dist/ollama-windows-*.zip

  # Linux x86 assets built using the container based build
  build-linux-amd64:
    environment: release
    runs-on: linux
    env:
      OLLAMA_SKIP_MANIFEST_CREATE: '1'
      BUILD_ARCH: amd64
      PUSH: '1'
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: recursive
      - name: Set Version
        shell: bash
        run: echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_ENV
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ vars.DOCKER_USER }}
          password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
      - run: |
          ./scripts/build_linux.sh
          ./scripts/build_docker.sh
          mv dist/deps/* dist/
      - uses: actions/upload-artifact@v4
        with:
          name: dist-linux-amd64
          path: |
            dist/*linux*
            !dist/*-cov

  # Linux ARM assets built using the container based build
  # (at present, docker isn't pre-installed on arm ubuntu images)
  build-linux-arm64:
    environment: release
    runs-on: linux-arm64
    env:
      OLLAMA_SKIP_MANIFEST_CREATE: '1'
      BUILD_ARCH: arm64
      PUSH: '1'
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: recursive
      - name: Set Version
        shell: bash
        run: echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_ENV
      - name: 'Install Docker'
        run: |
          # Add Docker's official GPG key:
          env
          uname -a
          sudo apt-get update
          sudo apt-get install -y ca-certificates curl
          sudo install -m 0755 -d /etc/apt/keyrings
          sudo curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc
          sudo chmod a+r /etc/apt/keyrings/docker.asc

          # Add the repository to Apt sources:
          echo \
            "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \
            $(. /etc/os-release && echo "$VERSION_CODENAME") stable" | \
            sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
          sudo apt-get update
          sudo apt-get install -y docker-ce docker-ce-cli containerd.io
          sudo usermod -aG docker $USER
          sudo apt-get install acl
          sudo setfacl --modify user:$USER:rw /var/run/docker.sock
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ vars.DOCKER_USER }}
          password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
      - run: |
          ./scripts/build_linux.sh
          ./scripts/build_docker.sh
      - uses: actions/upload-artifact@v4
        with:
          name: dist-linux-arm64
          path: |
            dist/*linux*
            !dist/*-cov

  # Aggregate all the assets and ship a release
  release:
    needs:
      - build-darwin
      - build-windows
      - build-linux-amd64
      - build-linux-arm64
    runs-on: linux
    environment: release
    permissions:
      contents: write
    env:
      OLLAMA_SKIP_IMAGE_BUILD: '1'
      PUSH: '1'
    steps:
      - uses: actions/checkout@v4
      - name: Set Version
        shell: bash
        run: |
          echo "VERSION=${GITHUB_REF_NAME#v}" >> $GITHUB_ENV
          echo "RELEASE_VERSION=$(echo ${GITHUB_REF_NAME} | cut -f1 -d-)" >> $GITHUB_ENV
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ vars.DOCKER_USER }}
          password: ${{ secrets.DOCKER_ACCESS_TOKEN }}
      - run: ./scripts/build_docker.sh
      - name: Retrieve built artifact
        uses: actions/download-artifact@v4
        with:
          path: dist
          pattern: dist-*
          merge-multiple: true
      - run: |
          ls -lh dist/
          (cd dist; sha256sum * > sha256sum.txt)
          cat dist/sha256sum.txt
      - uses: ncipollo/release-action@v1
        with:
          name: ${{ env.RELEASE_VERSION }}
          allowUpdates: true
          artifacts: 'dist/*'
          draft: true
          prerelease: true
          omitBodyDuringUpdate: true
          generateReleaseNotes: true
          omitDraftDuringUpdate: true
          omitPrereleaseDuringUpdate: true
          replacesArtifacts: true
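
Several jobs in this workflow derive VERSION and RELEASE_VERSION from the pushed tag using shell parameter expansion and cut. A standalone illustration of those two expressions, runnable outside CI with an example tag name:

```
GITHUB_REF_NAME="v0.1.32-rc1"                    # example tag
echo "${GITHUB_REF_NAME#v}"                      # 0.1.32-rc1 (strip leading v)
echo "${GITHUB_REF_NAME}" | cut -f1 -d-          # v0.1.32 (drop the -rc suffix)
```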

.github/workflows/test.yaml (vendored): 287 changed lines
@@ -1,59 +1,18 @@
name: test

concurrency:
  # For PRs, later CI runs preempt previous ones. e.g. a force push on a PR
  # cancels running CI jobs and starts all new ones.
  #
  # For non-PR pushes, concurrency.group needs to be unique for every distinct
  # CI run we want to have happen. Use run_id, which in practice means all
  # non-PR CI runs will be allowed to run without preempting each other.
  group: ${{ github.workflow }}-$${{ github.pull_request.number || github.run_id }}
  cancel-in-progress: true

on:
  pull_request:
    paths:
      - '**/*'
      - '!docs/**'
      - '!README.md'

jobs:
  changes:
    runs-on: ubuntu-latest
    outputs:
      GENERATE: ${{ steps.changes.outputs.GENERATE }}
      GENERATE_CUDA: ${{ steps.changes.outputs.GENERATE_CUDA }}
      GENERATE_ROCM: ${{ steps.changes.outputs.GENERATE_ROCM }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - id: changes
        run: |
          changed() {
            git diff-tree -r --no-commit-id --name-only \
              $(git merge-base ${{ github.event.pull_request.base.sha }} ${{ github.event.pull_request.head.sha }}) \
              ${{ github.event.pull_request.head.sha }} \
              | xargs python3 -c "import sys; print(any([x.startswith('$1') for x in sys.argv[1:]]))"
          }

          {
            echo GENERATE=$(changed llm/)
            echo GENERATE_CUDA=$(changed llm/)
            echo GENERATE_ROCM=$(changed llm/)
          } >>$GITHUB_OUTPUT

  generate:
    needs: [changes]
    if: ${{ needs.changes.outputs.GENERATE == 'True' }}
    strategy:
      matrix:
        os: [ubuntu-latest, macos-latest, windows-2019]
        os: [ubuntu-latest, macos-latest, windows-latest]
        arch: [amd64, arm64]
        exclude:
          - os: ubuntu-latest
            arch: arm64
          - os: windows-2019
          - os: windows-latest
            arch: arm64
    runs-on: ${{ matrix.os }}
    env:
@@ -62,258 +21,86 @@ jobs:
      - uses: actions/checkout@v4
      - uses: actions/setup-go@v5
        with:
          go-version-file: go.mod
          go-version: '1.21'
          cache: true
      - if: ${{ startsWith(matrix.os, 'windows-') }}
        shell: pwsh
        run: |
          $path = vswhere -latest -products * -requires Microsoft.VisualStudio.Component.VC.Tools.x86.x64 -property installationPath
          if ($path) {
            $path = join-path $path 'Common7\Tools\vsdevcmd.bat'
            if (test-path $path) {
              cmd /s /c """$path"" $args && set" | where { $_ -match '(\w+)=(.*)' } | foreach {
                echo "$($Matches[1])=$($Matches[2])" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf8 -Append
              }
            }
          }

          echo "C:\Program Files\Git\usr\bin" | Out-File -FilePath $Env:GITHUB_PATH -Encoding utf8 -Append
      - run: go get ./...
      - run: |
          $gopath=(get-command go).source | split-path -parent
          $gccpath=(get-command gcc).source | split-path -parent
          & "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\Common7\Tools\Launch-VsDevShell.ps1"
          cd $env:GITHUB_WORKSPACE
          $env:CMAKE_SYSTEM_VERSION="10.0.22621.0"
          $env:PATH="$gopath;$gccpath;$env:PATH"
          echo $env:PATH
          go generate -x ./...
        if: ${{ startsWith(matrix.os, 'windows-') }}
        name: 'Windows Go Generate'
      - run: go generate -x ./...
        if: ${{ ! startsWith(matrix.os, 'windows-') }}
        name: 'Unix Go Generate'
      - uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.os }}-${{ matrix.arch }}-libraries
          path: |
            llm/build/**/bin/*
            llm/build/**/*.a
  generate-cuda:
    needs: [changes]
    if: ${{ needs.changes.outputs.GENERATE_CUDA == 'True' }}
    strategy:
      matrix:
        cuda-version:
          - '11.8.0'
    runs-on: linux
    container: nvidia/cuda:${{ matrix.cuda-version }}-devel-ubuntu20.04
    steps:
      - run: |
          apt-get update && apt-get install -y git build-essential curl
          curl -fsSL https://github.com/Kitware/CMake/releases/download/v3.28.1/cmake-3.28.1-linux-x86_64.tar.gz \
            | tar -zx -C /usr --strip-components 1
        env:
          DEBIAN_FRONTEND: noninteractive
      - uses: actions/checkout@v4
      - uses: actions/setup-go@v4
        with:
          go-version-file: go.mod
          cache: true
      - run: go get ./...
      - run: |
          git config --global --add safe.directory /__w/ollama/ollama
          go generate -x ./...
        env:
          OLLAMA_SKIP_CPU_GENERATE: '1'
      - uses: actions/upload-artifact@v4
        with:
          name: cuda-${{ matrix.cuda-version }}-libraries
          path: |
            llm/build/**/bin/*
            dist/windows-amd64/**
  generate-rocm:
    needs: [changes]
    if: ${{ needs.changes.outputs.GENERATE_ROCM == 'True' }}
    strategy:
      matrix:
        rocm-version:
          - '6.0.2'
    runs-on: linux
    container: rocm/dev-ubuntu-20.04:${{ matrix.rocm-version }}
    steps:
      - run: |
          apt-get update && apt-get install -y git build-essential curl rocm-libs
          curl -fsSL https://github.com/Kitware/CMake/releases/download/v3.28.1/cmake-3.28.1-linux-x86_64.tar.gz \
            | tar -zx -C /usr --strip-components 1
        env:
          DEBIAN_FRONTEND: noninteractive
      - uses: actions/checkout@v4
      - uses: actions/setup-go@v4
        with:
          go-version-file: go.mod
          cache: true
      - run: go get ./...
      - run: |
          git config --global --add safe.directory /__w/ollama/ollama
          go generate -x ./...
        env:
          OLLAMA_SKIP_CPU_GENERATE: '1'
      - uses: actions/upload-artifact@v4
        with:
          name: rocm-${{ matrix.rocm-version }}-libraries
          path: |
            llm/build/**/bin/*
            dist/windows-amd64/**

  # ROCm generation step
  generate-windows-rocm:
    needs: [changes]
    if: ${{ needs.changes.outputs.GENERATE_ROCM == 'True' }}
    runs-on: windows
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-go@v5
        with:
          go-version-file: go.mod
          cache: true
      - name: 'Install ROCm'
        run: |
          $ErrorActionPreference = "Stop"
          write-host "downloading AMD HIP Installer"
          Invoke-WebRequest -Uri "https://download.amd.com/developer/eula/rocm-hub/AMD-Software-PRO-Edition-23.Q4-WinSvr2022-For-HIP.exe" -OutFile "${env:RUNNER_TEMP}\rocm-install.exe"
          write-host "Installing AMD HIP"
          Start-Process "${env:RUNNER_TEMP}\rocm-install.exe" -ArgumentList '-install' -NoNewWindow -Wait
          write-host "Completed AMD HIP"
      - name: 'Verify ROCm'
        run: |
          & 'C:\Program Files\AMD\ROCm\*\bin\clang.exe' --version
      - run: go get ./...
      - run: |
          $gopath=(get-command go).source | split-path -parent
          & "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\Common7\Tools\Launch-VsDevShell.ps1"
          cd $env:GITHUB_WORKSPACE
          $env:CMAKE_SYSTEM_VERSION="10.0.22621.0"
          $env:PATH="$gopath;$env:PATH"
          $env:OLLAMA_SKIP_CPU_GENERATE="1"
          $env:HIP_PATH=$(Resolve-Path 'C:\Program Files\AMD\ROCm\*\bin\clang.exe' | split-path | split-path)
          go generate -x ./...
        name: go generate
        env:
          OLLAMA_SKIP_CPU_GENERATE: '1'
      # TODO - do we need any artifacts?

  # CUDA generation step
  generate-windows-cuda:
    needs: [changes]
    if: ${{ needs.changes.outputs.GENERATE_CUDA == 'True' }}
    runs-on: windows
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-go@v5
        with:
          go-version-file: go.mod
          cache: true
      - name: 'Install CUDA'
        run: |
          $ErrorActionPreference = "Stop"
          write-host "downloading CUDA Installer"
          Invoke-WebRequest -Uri "https://developer.download.nvidia.com/compute/cuda/11.3.1/local_installers/cuda_11.3.1_465.89_win10.exe" -OutFile "${env:RUNNER_TEMP}\cuda-install.exe"
          write-host "Installing CUDA"
          Start-Process "${env:RUNNER_TEMP}\cuda-install.exe" -ArgumentList '-s' -NoNewWindow -Wait
          write-host "Completed CUDA"
          $cudaPath=((resolve-path "c:\Program Files\NVIDIA*\CUDA\v*\bin\nvcc.exe")[0].path | split-path | split-path)
          $cudaVer=($cudaPath | split-path -leaf ) -replace 'v(\d+).(\d+)', '$1_$2'
          echo "$cudaPath\bin" >> $env:GITHUB_PATH
          echo "CUDA_PATH=$cudaPath" >> $env:GITHUB_ENV
          echo "CUDA_PATH_V${cudaVer}=$cudaPath" >> $env:GITHUB_ENV
          echo "CUDA_PATH_VX_Y=CUDA_PATH_V${cudaVer}" >> $env:GITHUB_ENV
      - name: 'Verify CUDA'
        run: nvcc -V
      - run: go get ./...
      - name: go generate
        run: |
          $gopath=(get-command go).source | split-path -parent
          $cudabin=(get-command nvcc).source | split-path
          & "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\Common7\Tools\Launch-VsDevShell.ps1"
          cd $env:GITHUB_WORKSPACE
          $env:CMAKE_SYSTEM_VERSION="10.0.22621.0"
          $env:PATH="$gopath;$cudabin;$env:PATH"
          $env:OLLAMA_SKIP_CPU_GENERATE="1"
          go generate -x ./...
        env:
          OLLAMA_SKIP_CPU_GENERATE: '1'
      # TODO - do we need any artifacts?

            llm/llama.cpp/build/**/lib/*
  lint:
    needs: generate
    strategy:
      matrix:
        os: [ubuntu-latest, macos-latest, windows-2019]
        os: [ubuntu-latest, macos-latest, windows-latest]
        arch: [amd64, arm64]
        exclude:
          - os: ubuntu-latest
            arch: arm64
          - os: windows-2019
          - os: windows-latest
            arch: arm64
          - os: macos-latest
            arch: amd64
    runs-on: ${{ matrix.os }}
    env:
      GOARCH: ${{ matrix.arch }}
      CGO_ENABLED: '1'
      CGO_ENABLED: "1"
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: recursive
      - uses: actions/setup-go@v5
        with:
          go-version-file: go.mod
          go-version: '1.21'
          cache: false
      - run: |
          case ${{ matrix.arch }} in
            amd64) echo ARCH=x86_64 ;;
            arm64) echo ARCH=arm64 ;;
          esac >>$GITHUB_ENV
        shell: bash
      - run: |
          mkdir -p llm/build/linux/$ARCH/stub/bin
          touch llm/build/linux/$ARCH/stub/bin/ollama_llama_server
        if: ${{ startsWith(matrix.os, 'ubuntu-') }}
      - run: |
          mkdir -p llm/build/darwin/$ARCH/stub/bin
          touch llm/build/darwin/$ARCH/stub/bin/ollama_llama_server
        if: ${{ startsWith(matrix.os, 'macos-') }}
      - uses: golangci/golangci-lint-action@v4
      - uses: actions/download-artifact@v4
        with:
          args: --timeout 8m0s -v
          name: ${{ matrix.os }}-${{ matrix.arch }}-libraries
          path: llm/llama.cpp/build
      - uses: golangci/golangci-lint-action@v3
  test:
    needs: generate
    strategy:
      matrix:
        os: [ubuntu-latest, macos-latest, windows-2019]
        os: [ubuntu-latest, macos-latest, windows-latest]
        arch: [amd64]
        exclude:
          - os: ubuntu-latest
            arch: arm64
          - os: windows-2019
          - os: windows-latest
            arch: arm64
    runs-on: ${{ matrix.os }}
    env:
      GOARCH: ${{ matrix.arch }}
      CGO_ENABLED: '1'
      OLLAMA_CPU_TARGET: 'static'
      CGO_ENABLED: "1"
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: recursive
      - uses: actions/setup-go@v5
        with:
          go-version-file: go.mod
          go-version: '1.21'
          cache: true
      - run: |
          case ${{ matrix.arch }} in
            amd64) echo ARCH=x86_64 ;;
            arm64) echo ARCH=arm64 ;;
          esac >>$GITHUB_ENV
        shell: bash
      - run: |
          mkdir -p llm/build/linux/$ARCH/stub/bin
          touch llm/build/linux/$ARCH/stub/bin/ollama_llama_server
        if: ${{ startsWith(matrix.os, 'ubuntu-') }}
      - run: |
          mkdir -p llm/build/darwin/$ARCH/stub/bin
          touch llm/build/darwin/$ARCH/stub/bin/ollama_llama_server
        if: ${{ startsWith(matrix.os, 'macos-') }}
        shell: bash
      - run: go generate ./...
      - run: go get
      - uses: actions/download-artifact@v4
        with:
          name: ${{ matrix.os }}-${{ matrix.arch }}-libraries
          path: llm/llama.cpp/build
      - run: go build
      - run: go test -v ./...
      - uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.os }}-binaries
          path: ollama
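
The changes job above gates the generate jobs on which paths a pull request touches. A minimal standalone sketch of the same technique, with grep standing in for the workflow's inline python and BASE_SHA/HEAD_SHA assumed to point at the PR base and head commits:

```
changed() {
  # list files touched between the merge base and the PR head,
  # then test whether any of them fall under the given prefix
  git diff-tree -r --no-commit-id --name-only \
    "$(git merge-base "$BASE_SHA" "$HEAD_SHA")" "$HEAD_SHA" \
    | grep -q "^$1" && echo True || echo False
}
changed llm/   # prints True when any file under llm/ changed
```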

.gitignore (vendored): 5 changed lines
@@ -9,7 +9,4 @@ ggml-metal.metal
.cache
*.exe
.idea
test_data
*.crt
llm/build
__debug_bin*
test_data

@@ -15,3 +15,13 @@ linters:
  - misspell
  - nilerr
  - unused
linters-settings:
  errcheck:
    # exclude the following functions since we don't generally
    # need to be concerned with the returned errors
    exclude-functions:
      - encoding/binary.Read
      - (*os.File).Seek
      - (*bufio.Writer).WriteString
      - (*github.com/spf13/pflag.FlagSet).Set
      - (*github.com/jmorganca/ollama/llm.readSeekOffset).Seek
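
The lint job in test.yaml passes "--timeout 8m0s -v" to golangci-lint through the action's args input; a roughly equivalent local invocation, which would pick up the linters config above, is:

```
golangci-lint run --timeout 8m0s -v
```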

Dockerfile: 145 changed lines
@@ -1,144 +1,29 @@
ARG GOLANG_VERSION=1.22.1
ARG CMAKE_VERSION=3.22.1
# this CUDA_VERSION corresponds with the one specified in docs/gpu.md
ARG CUDA_VERSION=11.3.1
ARG ROCM_VERSION=6.0.2
FROM nvidia/cuda:11.8.0-devel-ubuntu22.04

# Copy the minimal context we need to run the generate scripts
FROM scratch AS llm-code
COPY .git .git
COPY .gitmodules .gitmodules
COPY llm llm
ARG TARGETARCH
ARG GOFLAGS="'-ldflags=-w -s'"

FROM --platform=linux/amd64 nvidia/cuda:$CUDA_VERSION-devel-centos7 AS cuda-build-amd64
ARG CMAKE_VERSION
COPY ./scripts/rh_linux_deps.sh /
RUN CMAKE_VERSION=${CMAKE_VERSION} sh /rh_linux_deps.sh
ENV PATH /opt/rh/devtoolset-10/root/usr/bin:$PATH
COPY --from=llm-code / /go/src/github.com/ollama/ollama/
WORKDIR /go/src/github.com/ollama/ollama/llm/generate
ARG CGO_CFLAGS
RUN OLLAMA_SKIP_STATIC_GENERATE=1 OLLAMA_SKIP_CPU_GENERATE=1 sh gen_linux.sh
WORKDIR /go/src/github.com/jmorganca/ollama
RUN apt-get update && apt-get install -y git build-essential cmake
ADD https://dl.google.com/go/go1.21.3.linux-$TARGETARCH.tar.gz /tmp/go1.21.3.tar.gz
RUN mkdir -p /usr/local && tar xz -C /usr/local </tmp/go1.21.3.tar.gz

FROM --platform=linux/arm64 nvidia/cuda:$CUDA_VERSION-devel-rockylinux8 AS cuda-build-arm64
ARG CMAKE_VERSION
COPY ./scripts/rh_linux_deps.sh /
RUN CMAKE_VERSION=${CMAKE_VERSION} sh /rh_linux_deps.sh
ENV PATH /opt/rh/gcc-toolset-10/root/usr/bin:$PATH
COPY --from=llm-code / /go/src/github.com/ollama/ollama/
WORKDIR /go/src/github.com/ollama/ollama/llm/generate
ARG CGO_CFLAGS
RUN OLLAMA_SKIP_STATIC_GENERATE=1 OLLAMA_SKIP_CPU_GENERATE=1 sh gen_linux.sh

FROM --platform=linux/amd64 rocm/dev-centos-7:${ROCM_VERSION}-complete AS rocm-build-amd64
ARG CMAKE_VERSION
COPY ./scripts/rh_linux_deps.sh /
RUN CMAKE_VERSION=${CMAKE_VERSION} sh /rh_linux_deps.sh
ENV PATH /opt/rh/devtoolset-10/root/usr/bin:$PATH
ENV LIBRARY_PATH /opt/amdgpu/lib64
COPY --from=llm-code / /go/src/github.com/ollama/ollama/
WORKDIR /go/src/github.com/ollama/ollama/llm/generate
ARG CGO_CFLAGS
ARG AMDGPU_TARGETS
RUN OLLAMA_SKIP_STATIC_GENERATE=1 OLLAMA_SKIP_CPU_GENERATE=1 sh gen_linux.sh
RUN mkdir /tmp/scratch && \
    for dep in $(zcat /go/src/github.com/ollama/ollama/llm/build/linux/x86_64/rocm*/bin/deps.txt.gz) ; do \
        cp ${dep} /tmp/scratch/ || exit 1 ; \
    done && \
    (cd /opt/rocm/lib && tar cf - rocblas/library) | (cd /tmp/scratch/ && tar xf - ) && \
    mkdir -p /go/src/github.com/ollama/ollama/dist/deps/ && \
    (cd /tmp/scratch/ && tar czvf /go/src/github.com/ollama/ollama/dist/deps/ollama-linux-amd64-rocm.tgz . )


FROM --platform=linux/amd64 centos:7 AS cpu-builder-amd64
ARG CMAKE_VERSION
ARG GOLANG_VERSION
COPY ./scripts/rh_linux_deps.sh /
RUN CMAKE_VERSION=${CMAKE_VERSION} GOLANG_VERSION=${GOLANG_VERSION} sh /rh_linux_deps.sh
ENV PATH /opt/rh/devtoolset-10/root/usr/bin:$PATH
COPY --from=llm-code / /go/src/github.com/ollama/ollama/
ARG OLLAMA_CUSTOM_CPU_DEFS
ARG CGO_CFLAGS
WORKDIR /go/src/github.com/ollama/ollama/llm/generate

FROM --platform=linux/amd64 cpu-builder-amd64 AS static-build-amd64
RUN OLLAMA_CPU_TARGET="static" sh gen_linux.sh
FROM --platform=linux/amd64 cpu-builder-amd64 AS cpu-build-amd64
RUN OLLAMA_SKIP_STATIC_GENERATE=1 OLLAMA_CPU_TARGET="cpu" sh gen_linux.sh
FROM --platform=linux/amd64 cpu-builder-amd64 AS cpu_avx-build-amd64
RUN OLLAMA_SKIP_STATIC_GENERATE=1 OLLAMA_CPU_TARGET="cpu_avx" sh gen_linux.sh
FROM --platform=linux/amd64 cpu-builder-amd64 AS cpu_avx2-build-amd64
RUN OLLAMA_SKIP_STATIC_GENERATE=1 OLLAMA_CPU_TARGET="cpu_avx2" sh gen_linux.sh

FROM --platform=linux/arm64 centos:7 AS cpu-builder-arm64
ARG CMAKE_VERSION
ARG GOLANG_VERSION
COPY ./scripts/rh_linux_deps.sh /
RUN CMAKE_VERSION=${CMAKE_VERSION} GOLANG_VERSION=${GOLANG_VERSION} sh /rh_linux_deps.sh
ENV PATH /opt/rh/devtoolset-10/root/usr/bin:$PATH
COPY --from=llm-code / /go/src/github.com/ollama/ollama/
ARG OLLAMA_CUSTOM_CPU_DEFS
ARG CGO_CFLAGS
WORKDIR /go/src/github.com/ollama/ollama/llm/generate

FROM --platform=linux/arm64 cpu-builder-arm64 AS static-build-arm64
RUN OLLAMA_CPU_TARGET="static" sh gen_linux.sh
FROM --platform=linux/arm64 cpu-builder-arm64 AS cpu-build-arm64
RUN OLLAMA_SKIP_STATIC_GENERATE=1 OLLAMA_CPU_TARGET="cpu" sh gen_linux.sh


# Intermediate stage used for ./scripts/build_linux.sh
FROM --platform=linux/amd64 cpu-build-amd64 AS build-amd64
ENV CGO_ENABLED 1
WORKDIR /go/src/github.com/ollama/ollama
COPY . .
COPY --from=static-build-amd64 /go/src/github.com/ollama/ollama/llm/build/linux/ llm/build/linux/
COPY --from=cpu_avx-build-amd64 /go/src/github.com/ollama/ollama/llm/build/linux/ llm/build/linux/
COPY --from=cpu_avx2-build-amd64 /go/src/github.com/ollama/ollama/llm/build/linux/ llm/build/linux/
COPY --from=cuda-build-amd64 /go/src/github.com/ollama/ollama/llm/build/linux/ llm/build/linux/
COPY --from=rocm-build-amd64 /go/src/github.com/ollama/ollama/llm/build/linux/ llm/build/linux/
COPY --from=rocm-build-amd64 /go/src/github.com/ollama/ollama/dist/deps/ ./dist/deps/
ARG GOFLAGS
ARG CGO_CFLAGS
RUN go build -trimpath .
ENV GOARCH=$TARGETARCH
ENV GOFLAGS=$GOFLAGS
RUN /usr/local/go/bin/go generate ./... \
    && /usr/local/go/bin/go build .

# Intermediate stage used for ./scripts/build_linux.sh
FROM --platform=linux/arm64 cpu-build-arm64 AS build-arm64
ENV CGO_ENABLED 1
ARG GOLANG_VERSION
WORKDIR /go/src/github.com/ollama/ollama
COPY . .
COPY --from=static-build-arm64 /go/src/github.com/ollama/ollama/llm/build/linux/ llm/build/linux/
COPY --from=cuda-build-arm64 /go/src/github.com/ollama/ollama/llm/build/linux/ llm/build/linux/
ARG GOFLAGS
ARG CGO_CFLAGS
RUN go build -trimpath .

# Runtime stages
FROM --platform=linux/amd64 ubuntu:22.04 as runtime-amd64
FROM ubuntu:22.04
RUN apt-get update && apt-get install -y ca-certificates
COPY --from=build-amd64 /go/src/github.com/ollama/ollama/ollama /bin/ollama
FROM --platform=linux/arm64 ubuntu:22.04 as runtime-arm64
RUN apt-get update && apt-get install -y ca-certificates
COPY --from=build-arm64 /go/src/github.com/ollama/ollama/ollama /bin/ollama

# Radeon images are much larger so we keep it distinct from the CPU/CUDA image
FROM --platform=linux/amd64 rocm/dev-centos-7:${ROCM_VERSION}-complete as runtime-rocm
RUN update-pciids
COPY --from=build-amd64 /go/src/github.com/ollama/ollama/ollama /bin/ollama
COPY --from=0 /go/src/github.com/jmorganca/ollama/ollama /bin/ollama
EXPOSE 11434
ENV OLLAMA_HOST 0.0.0.0

ENTRYPOINT ["/bin/ollama"]
CMD ["serve"]

FROM runtime-$TARGETARCH
EXPOSE 11434
ENV OLLAMA_HOST 0.0.0.0
ENV PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
# set some environment variables for better NVIDIA compatibility
ENV PATH=/usr/local/nvidia/bin:/usr/local/cuda/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
ENV LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64
ENV NVIDIA_DRIVER_CAPABILITIES=compute,utility
ENV NVIDIA_VISIBLE_DEVICES=all

ENTRYPOINT ["/bin/ollama"]
CMD ["serve"]
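
The final FROM runtime-$TARGETARCH line in the new Dockerfile selects a per-architecture runtime stage; TARGETARCH is populated automatically by BuildKit for each target platform. An assumed multi-platform build invocation consistent with that pattern (the repo's actual logic lives in scripts/build_docker.sh, not shown here):

```
docker buildx build --platform linux/amd64,linux/arm64 -t ollama/ollama .
```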

Dockerfile.build (new file): 99 changed lines
@@ -0,0 +1,99 @@
ARG GOLANG_VERSION=1.21.3
ARG CMAKE_VERSION=3.22.1
ARG CUDA_VERSION=11.3.1

# Copy the minimal context we need to run the generate scripts
FROM scratch AS llm-code
COPY .git .git
COPY .gitmodules .gitmodules
COPY llm llm

FROM --platform=linux/amd64 nvidia/cuda:$CUDA_VERSION-devel-centos7 AS cuda-build-amd64
ARG CMAKE_VERSION
ARG CGO_CFLAGS
COPY ./scripts/rh_linux_deps.sh /
RUN CMAKE_VERSION=${CMAKE_VERSION} sh /rh_linux_deps.sh
ENV PATH /opt/rh/devtoolset-10/root/usr/bin:$PATH
COPY --from=llm-code / /go/src/github.com/jmorganca/ollama/
WORKDIR /go/src/github.com/jmorganca/ollama/llm/generate
RUN OLLAMA_SKIP_CPU_GENERATE=1 sh gen_linux.sh

FROM --platform=linux/arm64 nvidia/cuda:$CUDA_VERSION-devel-rockylinux8 AS cuda-build-arm64
ARG CMAKE_VERSION
ARG CGO_CFLAGS
COPY ./scripts/rh_linux_deps.sh /
RUN CMAKE_VERSION=${CMAKE_VERSION} sh /rh_linux_deps.sh
ENV PATH /opt/rh/gcc-toolset-10/root/usr/bin:$PATH
COPY --from=llm-code / /go/src/github.com/jmorganca/ollama/
WORKDIR /go/src/github.com/jmorganca/ollama/llm/generate
RUN OLLAMA_SKIP_CPU_GENERATE=1 sh gen_linux.sh

FROM --platform=linux/amd64 rocm/dev-centos-7:5.7.1-complete AS rocm-5-build-amd64
ARG CMAKE_VERSION
ARG CGO_CFLAGS
COPY ./scripts/rh_linux_deps.sh /
RUN CMAKE_VERSION=${CMAKE_VERSION} sh /rh_linux_deps.sh
ENV PATH /opt/rh/devtoolset-10/root/usr/bin:$PATH
ENV LIBRARY_PATH /opt/amdgpu/lib64
COPY --from=llm-code / /go/src/github.com/jmorganca/ollama/
WORKDIR /go/src/github.com/jmorganca/ollama/llm/generate
RUN OLLAMA_SKIP_CPU_GENERATE=1 sh gen_linux.sh

FROM --platform=linux/amd64 rocm/dev-centos-7:6.0-complete AS rocm-6-build-amd64
ARG CMAKE_VERSION
ARG CGO_CFLAGS
COPY ./scripts/rh_linux_deps.sh /
RUN CMAKE_VERSION=${CMAKE_VERSION} sh /rh_linux_deps.sh
ENV PATH /opt/rh/devtoolset-10/root/usr/bin:$PATH
ENV LIBRARY_PATH /opt/amdgpu/lib64
COPY --from=llm-code / /go/src/github.com/jmorganca/ollama/
WORKDIR /go/src/github.com/jmorganca/ollama/llm/generate
RUN OLLAMA_SKIP_CPU_GENERATE=1 sh gen_linux.sh

FROM --platform=linux/amd64 centos:7 AS cpu-build-amd64
ARG CMAKE_VERSION
ARG GOLANG_VERSION
ARG OLLAMA_CUSTOM_CPU_DEFS
ARG CGO_CFLAGS
COPY ./scripts/rh_linux_deps.sh /
RUN CMAKE_VERSION=${CMAKE_VERSION} GOLANG_VERSION=${GOLANG_VERSION} sh /rh_linux_deps.sh
ENV PATH /opt/rh/devtoolset-10/root/usr/bin:$PATH
COPY --from=llm-code / /go/src/github.com/jmorganca/ollama/
WORKDIR /go/src/github.com/jmorganca/ollama/llm/generate
RUN sh gen_linux.sh

FROM --platform=linux/arm64 centos:7 AS cpu-build-arm64
ARG CMAKE_VERSION
ARG GOLANG_VERSION
ARG OLLAMA_CUSTOM_CPU_DEFS
ARG CGO_CFLAGS
COPY ./scripts/rh_linux_deps.sh /
RUN CMAKE_VERSION=${CMAKE_VERSION} GOLANG_VERSION=${GOLANG_VERSION} sh /rh_linux_deps.sh
ENV PATH /opt/rh/devtoolset-10/root/usr/bin:$PATH
COPY --from=llm-code / /go/src/github.com/jmorganca/ollama/
WORKDIR /go/src/github.com/jmorganca/ollama/llm/generate
RUN sh gen_linux.sh


FROM --platform=linux/amd64 cpu-build-amd64 AS build-amd64
ENV CGO_ENABLED 1
ARG GOFLAGS
ARG CGO_CFLAGS
WORKDIR /go/src/github.com/jmorganca/ollama
COPY . .
COPY --from=cuda-build-amd64 /go/src/github.com/jmorganca/ollama/llm/llama.cpp/build/linux/ llm/llama.cpp/build/linux/
COPY --from=rocm-5-build-amd64 /go/src/github.com/jmorganca/ollama/llm/llama.cpp/build/linux/ llm/llama.cpp/build/linux/
COPY --from=rocm-6-build-amd64 /go/src/github.com/jmorganca/ollama/llm/llama.cpp/build/linux/ llm/llama.cpp/build/linux/
RUN go build .

FROM --platform=linux/arm64 cpu-build-arm64 AS build-arm64
ENV CGO_ENABLED 1
ARG GOLANG_VERSION
ARG GOFLAGS
ARG CGO_CFLAGS
WORKDIR /go/src/github.com/jmorganca/ollama
COPY . .
COPY --from=cuda-build-arm64 /go/src/github.com/jmorganca/ollama/llm/llama.cpp/build/linux/ llm/llama.cpp/build/linux/
RUN go build .

FROM build-$TARGETARCH
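
Dockerfile.build is not the default Dockerfile, so a build would have to name it explicitly. A minimal, assumed invocation that stops at the amd64 build stage defined above:

```
docker build -f Dockerfile.build --target build-amd64 -t ollama:build .
```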
176
README.md
@@ -1,5 +1,8 @@
|
||||
<div align="center">
|
||||
<img alt="ollama" height="200px" src="https://github.com/ollama/ollama/assets/3325447/0d0b44e2-8f4a-4e99-9b52-a5c1c741c8f7">
|
||||
<picture>
|
||||
<source media="(prefers-color-scheme: dark)" height="200px" srcset="https://github.com/jmorganca/ollama/assets/3325447/56ea1849-1284-4645-8970-956de6e51c3c">
|
||||
<img alt="logo" height="200px" src="https://github.com/jmorganca/ollama/assets/3325447/0d0b44e2-8f4a-4e99-9b52-a5c1c741c8f7">
|
||||
</picture>
|
||||
</div>
|
||||
|
||||
# Ollama
|
||||
@@ -10,59 +13,53 @@ Get up and running with large language models locally.
|
||||
|
||||
### macOS
|
||||
|
||||
[Download](https://ollama.com/download/Ollama-darwin.zip)
|
||||
[Download](https://ollama.ai/download/Ollama-darwin.zip)
|
||||
|
||||
### Windows preview
|
||||
### Windows
|
||||
|
||||
[Download](https://ollama.com/download/OllamaSetup.exe)
|
||||
Coming soon! For now, you can install Ollama on Windows via WSL2.
|
||||
|
||||
### Linux
|
||||
### Linux & WSL2
|
||||
|
||||
```
|
||||
curl -fsSL https://ollama.com/install.sh | sh
|
||||
curl https://ollama.ai/install.sh | sh
|
||||
```
|
||||
|
||||
[Manual install instructions](https://github.com/ollama/ollama/blob/main/docs/linux.md)
|
||||
[Manual install instructions](https://github.com/jmorganca/ollama/blob/main/docs/linux.md)
|
||||
|
||||
### Docker
|
||||
|
||||
The official [Ollama Docker image](https://hub.docker.com/r/ollama/ollama) `ollama/ollama` is available on Docker Hub.
|
||||
|
||||
### Libraries
|
||||
|
||||
- [ollama-python](https://github.com/ollama/ollama-python)
|
||||
- [ollama-js](https://github.com/ollama/ollama-js)
|
||||
|
||||
## Quickstart
|
||||
|
||||
To run and chat with [Llama 3](https://ollama.com/library/llama3):
|
||||
To run and chat with [Llama 2](https://ollama.ai/library/llama2):
|
||||
|
||||
```
|
||||
ollama run llama3
|
||||
ollama run llama2
|
||||
```
|
||||
|
||||
## Model library
|
||||
|
||||
Ollama supports a list of models available on [ollama.com/library](https://ollama.com/library 'ollama model library')
|
||||
Ollama supports a list of open-source models available on [ollama.ai/library](https://ollama.ai/library 'ollama model library')
|
||||
|
||||
Here are some example models that can be downloaded:
|
||||
Here are some example open-source models that can be downloaded:
|
||||
|
||||
| Model | Parameters | Size | Download |
|
||||
| ------------------ | ---------- | ----- | ------------------------------ |
|
||||
| Llama 3 | 8B | 4.7GB | `ollama run llama3` |
|
||||
| Llama 3 | 70B | 40GB | `ollama run llama3:70b` |
|
||||
| Phi 3 Mini | 3.8B | 2.3GB | `ollama run phi3` |
|
||||
| Phi 3 Medium | 14B | 7.9GB | `ollama run phi3:medium` |
|
||||
| Gemma | 2B | 1.4GB | `ollama run gemma:2b` |
|
||||
| Gemma | 7B | 4.8GB | `ollama run gemma:7b` |
|
||||
| Llama 2 | 7B | 3.8GB | `ollama run llama2` |
|
||||
| Mistral | 7B | 4.1GB | `ollama run mistral` |
|
||||
| Moondream 2 | 1.4B | 829MB | `ollama run moondream` |
|
||||
| Dolphin Phi | 2.7B | 1.6GB | `ollama run dolphin-phi` |
|
||||
| Phi-2 | 2.7B | 1.7GB | `ollama run phi` |
|
||||
| Neural Chat | 7B | 4.1GB | `ollama run neural-chat` |
|
||||
| Starling | 7B | 4.1GB | `ollama run starling-lm` |
|
||||
| Code Llama | 7B | 3.8GB | `ollama run codellama` |
|
||||
| Llama 2 Uncensored | 7B | 3.8GB | `ollama run llama2-uncensored` |
|
||||
| Llama 2 13B | 13B | 7.3GB | `ollama run llama2:13b` |
|
||||
| Llama 2 70B | 70B | 39GB | `ollama run llama2:70b` |
|
||||
| Orca Mini | 3B | 1.9GB | `ollama run orca-mini` |
|
||||
| Vicuna | 7B | 3.8GB | `ollama run vicuna` |
|
||||
| LLaVA | 7B | 4.5GB | `ollama run llava` |
|
||||
| Solar | 10.7B | 6.1GB | `ollama run solar` |
|
||||
|
||||
> Note: You should have at least 8 GB of RAM available to run the 7B models, 16 GB to run the 13B models, and 32 GB to run the 33B models.
|
||||
|
||||
@@ -96,16 +93,16 @@ See the [guide](docs/import.md) on importing models for more information.
|
||||
|
||||
### Customize a prompt
|
||||
|
||||
Models from the Ollama library can be customized with a prompt. For example, to customize the `llama3` model:
|
||||
Models from the Ollama library can be customized with a prompt. For example, to customize the `llama2` model:
|
||||
|
||||
```
|
||||
ollama pull llama3
|
||||
ollama pull llama2
|
||||
```
|
||||
|
||||
Create a `Modelfile`:
|
||||
|
||||
```
|
||||
FROM llama3
|
||||
FROM llama2
|
||||
|
||||
# set the temperature to 1 [higher is more creative, lower is more coherent]
|
||||
PARAMETER temperature 1
|
||||
@@ -140,7 +137,7 @@ ollama create mymodel -f ./Modelfile
|
||||
### Pull a model
|
||||
|
||||
```
|
||||
ollama pull llama3
|
||||
ollama pull llama2
|
||||
```
|
||||
|
||||
> This command can also be used to update a local model. Only the diff will be pulled.
|
||||
@@ -148,13 +145,13 @@ ollama pull llama3
|
||||
### Remove a model
|
||||
|
||||
```
|
||||
ollama rm llama3
|
||||
ollama rm llama2
|
||||
```
|
||||
|
||||
### Copy a model
|
||||
|
||||
```
|
||||
ollama cp llama3 my-model
|
||||
ollama cp llama2 my-llama2
|
||||
```
|
||||
|
||||
### Multiline input
|
||||
@@ -175,10 +172,10 @@ I'm a basic program that prints the famous "Hello, world!" message to the consol
|
||||
The image features a yellow smiley face, which is likely the central focus of the picture.
|
||||
```
|
||||
|
||||
### Pass the prompt as an argument
|
||||
### Pass in prompt as arguments
|
||||
|
||||
```
|
||||
$ ollama run llama3 "Summarize this file: $(cat README.md)"
|
||||
$ ollama run llama2 "Summarize this file: $(cat README.md)"
|
||||
Ollama is a lightweight, extensible framework for building and running language models on the local machine. It provides a simple API for creating, running, and managing models, as well as a library of pre-built models that can be easily used in a variety of applications.
|
||||
```
|
||||
|
||||
@@ -194,10 +191,25 @@ ollama list
|
||||
|
||||
## Building
|
||||
|
||||
See the [developer guide](https://github.com/ollama/ollama/blob/main/docs/development.md)
|
||||
Install `cmake` and `go`:
|
||||
|
||||
```
|
||||
brew install cmake go
|
||||
```
|
||||
|
||||
Then generate dependencies:
|
||||
```
|
||||
go generate ./...
|
||||
```
|
||||
Then build the binary:
|
||||
```
|
||||
go build .
|
||||
```
|
||||
|
||||
More detailed instructions can be found in the [developer guide](https://github.com/jmorganca/ollama/blob/main/docs/development.md)
|
||||
|
||||
|
||||
### Running local builds
|
||||
|
||||
Next, start the server:
|
||||
|
||||
```
|
||||
@@ -207,7 +219,7 @@ Next, start the server:
|
||||
Finally, in a separate shell, run a model:
|
||||
|
||||
```
|
||||
./ollama run llama3
|
||||
./ollama run llama2
|
||||
```
|
||||
|
||||
## REST API
|
||||
@@ -218,7 +230,7 @@ Ollama has a REST API for running and managing models.
|
||||
|
||||
```
|
||||
curl http://localhost:11434/api/generate -d '{
|
||||
"model": "llama3",
|
||||
"model": "llama2",
|
||||
"prompt":"Why is the sky blue?"
|
||||
}'
|
||||
```
|
||||
@@ -227,7 +239,7 @@ curl http://localhost:11434/api/generate -d '{
|
||||
|
||||
```
|
||||
curl http://localhost:11434/api/chat -d '{
|
||||
"model": "llama3",
|
||||
"model": "mistral",
|
||||
"messages": [
|
||||
{ "role": "user", "content": "why is the sky blue?" }
|
||||
]
|
||||
@@ -236,55 +248,26 @@ curl http://localhost:11434/api/chat -d '{
|
||||
|
||||
See the [API documentation](./docs/api.md) for all endpoints.

## Integrations

- [ollama-python](https://github.com/jmorganca/ollama-python)

## Community Integrations

### Web & Desktop

- [Open WebUI](https://github.com/open-webui/open-webui)
- [Enchanted (macOS native)](https://github.com/AugustDev/enchanted)
- [Hollama](https://github.com/fmaclen/hollama)
- [Lollms-Webui](https://github.com/ParisNeo/lollms-webui)
- [LibreChat](https://github.com/danny-avila/LibreChat)
- [Bionic GPT](https://github.com/bionic-gpt/bionic-gpt)
- [HTML UI](https://github.com/rtcfirefly/ollama-ui)
- [Saddle](https://github.com/jikkuatwork/saddle)
- [Chatbot UI](https://github.com/ivanfioravanti/chatbot-ollama)
- [Chatbot UI v2](https://github.com/mckaywrigley/chatbot-ui)
- [Typescript UI](https://github.com/ollama-interface/Ollama-Gui?tab=readme-ov-file)
- [Minimalistic React UI for Ollama Models](https://github.com/richawo/minimal-llm-ui)
- [Web UI](https://github.com/ollama-webui/ollama-webui)
- [Ollamac](https://github.com/kevinhermawan/Ollamac)
- [big-AGI](https://github.com/enricoros/big-AGI/blob/main/docs/config-local-ollama.md)
- [big-AGI](https://github.com/enricoros/big-agi/blob/main/docs/config-ollama.md)
- [Cheshire Cat assistant framework](https://github.com/cheshire-cat-ai/core)
- [Amica](https://github.com/semperai/amica)
- [chatd](https://github.com/BruceMacD/chatd)
- [Ollama-SwiftUI](https://github.com/kghandour/Ollama-SwiftUI)
- [Dify.AI](https://github.com/langgenius/dify)
- [MindMac](https://mindmac.app)
- [NextJS Web Interface for Ollama](https://github.com/jakobhoeg/nextjs-ollama-llm-ui)
- [Msty](https://msty.app)
- [Chatbox](https://github.com/Bin-Huang/Chatbox)
- [WinForm Ollama Copilot](https://github.com/tgraupmann/WinForm_Ollama_Copilot)
- [NextChat](https://github.com/ChatGPTNextWeb/ChatGPT-Next-Web) with [Get Started Doc](https://docs.nextchat.dev/models/ollama)
- [Alpaca WebUI](https://github.com/mmo80/alpaca-webui)
- [OllamaGUI](https://github.com/enoch1118/ollamaGUI)
- [OpenAOE](https://github.com/InternLM/OpenAOE)
- [Odin Runes](https://github.com/leonid20000/OdinRunes)
- [LLM-X](https://github.com/mrdjohnson/llm-x) (Progressive Web App)
- [AnythingLLM (Docker + MacOs/Windows/Linux native app)](https://github.com/Mintplex-Labs/anything-llm)
- [Ollama Basic Chat: Uses HyperDiv Reactive UI](https://github.com/rapidarchitect/ollama_basic_chat)
- [Ollama-chats RPG](https://github.com/drazdra/ollama-chats)
- [QA-Pilot](https://github.com/reid41/QA-Pilot) (Chat with Code Repository)
- [ChatOllama](https://github.com/sugarforever/chat-ollama) (Open Source Chatbot based on Ollama with Knowledge Bases)
- [CRAG Ollama Chat](https://github.com/Nagi-ovo/CRAG-Ollama-Chat) (Simple Web Search with Corrective RAG)
- [RAGFlow](https://github.com/infiniflow/ragflow) (Open-source Retrieval-Augmented Generation engine based on deep document understanding)
- [StreamDeploy](https://github.com/StreamDeploy-DevRel/streamdeploy-llm-app-scaffold) (LLM Application Scaffold)
- [chat](https://github.com/swuecho/chat) (chat web app for teams)
- [Lobe Chat](https://github.com/lobehub/lobe-chat) with [Integrating Doc](https://lobehub.com/docs/self-hosting/examples/ollama)
- [Ollama RAG Chatbot](https://github.com/datvodinh/rag-chatbot.git) (Local Chat with multiple PDFs using Ollama and RAG)
- [BrainSoup](https://www.nurgo-software.com/products/brainsoup) (Flexible native client with RAG & multi-agent automation)
- [macai](https://github.com/Renset/macai) (macOS client for Ollama, ChatGPT, and other compatible API back-ends)
- [Olpaka](https://github.com/Otacon/olpaka) (User-friendly Flutter Web App for Ollama)
- [OllamaSpring](https://github.com/CrazyNeil/OllamaSpring) (Ollama Client for macOS)

### Terminal

@@ -293,37 +276,23 @@ See the [API documentation](./docs/api.md) for all endpoints.

- [Emacs client](https://github.com/zweifisch/ollama)
- [gen.nvim](https://github.com/David-Kunz/gen.nvim)
- [ollama.nvim](https://github.com/nomnivore/ollama.nvim)
- [ollero.nvim](https://github.com/marco-souza/ollero.nvim)
- [ollama-chat.nvim](https://github.com/gerazov/ollama-chat.nvim)
- [ogpt.nvim](https://github.com/huynle/ogpt.nvim)
- [gptel Emacs client](https://github.com/karthink/gptel)
- [Oatmeal](https://github.com/dustinblackman/oatmeal)
- [cmdh](https://github.com/pgibler/cmdh)
- [ooo](https://github.com/npahlfer/ooo)
- [shell-pilot](https://github.com/reid41/shell-pilot)
- [tenere](https://github.com/pythops/tenere)
- [llm-ollama](https://github.com/taketwo/llm-ollama) for [Datasette's LLM CLI](https://llm.datasette.io/en/stable/).
- [typechat-cli](https://github.com/anaisbetts/typechat-cli)
- [ShellOracle](https://github.com/djcopley/ShellOracle)
- [tlm](https://github.com/yusufcanb/tlm)
- [podman-ollama](https://github.com/ericcurtin/podman-ollama)

### Database

- [MindsDB](https://github.com/mindsdb/mindsdb/blob/staging/mindsdb/integrations/handlers/ollama_handler/README.md) (Connects Ollama models with nearly 200 data platforms and apps)
- [chromem-go](https://github.com/philippgille/chromem-go/blob/v0.5.0/embed_ollama.go) with [example](https://github.com/philippgille/chromem-go/tree/v0.5.0/examples/rag-wikipedia-ollama)
- [MindsDB](https://github.com/mindsdb/mindsdb/blob/staging/mindsdb/integrations/handlers/ollama_handler/README.md)

### Package managers

- [Pacman](https://archlinux.org/packages/extra/x86_64/ollama/)
- [Helm Chart](https://artifacthub.io/packages/helm/ollama-helm/ollama)
- [Guix channel](https://codeberg.org/tusharhero/ollama-guix)

### Libraries

- [LangChain](https://python.langchain.com/docs/integrations/llms/ollama) and [LangChain.js](https://js.langchain.com/docs/modules/model_io/models/llms/integrations/ollama) with [example](https://js.langchain.com/docs/use_cases/question_answering/local_retrieval_qa)
- [LangChainGo](https://github.com/tmc/langchaingo/) with [example](https://github.com/tmc/langchaingo/tree/main/examples/ollama-completion-example)
- [LangChain4j](https://github.com/langchain4j/langchain4j) with [example](https://github.com/langchain4j/langchain4j-examples/tree/main/ollama-examples/src/main/java)
- [LlamaIndex](https://gpt-index.readthedocs.io/en/stable/examples/llm/ollama.html)
- [LiteLLM](https://github.com/BerriAI/litellm)
- [OllamaSharp for .NET](https://github.com/awaescher/OllamaSharp)
@@ -337,15 +306,8 @@ See the [API documentation](./docs/api.md) for all endpoints.
- [LangChainDart](https://github.com/davidmigloz/langchain_dart)
- [Semantic Kernel - Python](https://github.com/microsoft/semantic-kernel/tree/main/python/semantic_kernel/connectors/ai/ollama)
- [Haystack](https://github.com/deepset-ai/haystack-integrations/blob/main/integrations/ollama.md)
- [Elixir LangChain](https://github.com/brainlid/langchain)
- [Ollama for R - rollama](https://github.com/JBGruber/rollama)
- [Ollama for R - ollama-r](https://github.com/hauselin/ollama-r)
- [Ollama-ex for Elixir](https://github.com/lebrunel/ollama-ex)
- [Ollama Connector for SAP ABAP](https://github.com/b-tocs/abap_btocs_ollama)
- [Testcontainers](https://testcontainers.com/modules/ollama/)
- [Portkey](https://portkey.ai/docs/welcome/integration-guides/ollama)
- [PromptingTools.jl](https://github.com/svilupp/PromptingTools.jl) with an [example](https://svilupp.github.io/PromptingTools.jl/dev/examples/working_with_ollama)
- [LlamaScript](https://github.com/Project-Llama/llamascript)

### Mobile

- [Enchanted](https://github.com/AugustDev/enchanted)
@@ -358,27 +320,11 @@ See the [API documentation](./docs/api.md) for all endpoints.
- [Continue](https://github.com/continuedev/continue)
- [Obsidian Ollama plugin](https://github.com/hinterdupfinger/obsidian-ollama)
- [Logseq Ollama plugin](https://github.com/omagdy7/ollama-logseq)
- [NotesOllama](https://github.com/andersrex/notesollama) (Apple Notes Ollama plugin)
- [Dagger Chatbot](https://github.com/samalba/dagger-chatbot)
- [Discord AI Bot](https://github.com/mekb-turtle/discord-ai-bot)
- [Ollama Telegram Bot](https://github.com/ruecat/ollama-telegram)
- [Hass Ollama Conversation](https://github.com/ej52/hass-ollama-conversation)
- [Rivet plugin](https://github.com/abrenneke/rivet-plugin-ollama)
- [Obsidian BMO Chatbot plugin](https://github.com/longy2k/obsidian-bmo-chatbot)
- [Cliobot](https://github.com/herval/cliobot) (Telegram bot with Ollama support)
- [Copilot for Obsidian plugin](https://github.com/logancyang/obsidian-copilot)
- [Obsidian Local GPT plugin](https://github.com/pfrankov/obsidian-local-gpt)
- [Open Interpreter](https://docs.openinterpreter.com/language-model-setup/local-models/ollama)
- [Llama Coder](https://github.com/ex3ndr/llama-coder) (Copilot alternative using Ollama)
- [Ollama Copilot](https://github.com/bernardo-bruning/ollama-copilot) (Proxy that allows you to use ollama as a copilot like Github copilot)
- [twinny](https://github.com/rjmacarthy/twinny) (Copilot and Copilot chat alternative using Ollama)
- [Wingman-AI](https://github.com/RussellCanfield/wingman-ai) (Copilot code and chat alternative using Ollama and HuggingFace)
- [Page Assist](https://github.com/n4ze3m/page-assist) (Chrome Extension)
- [AI Telegram Bot](https://github.com/tusharhero/aitelegrambot) (Telegram bot using Ollama in backend)
- [AI ST Completion](https://github.com/yaroslavyaroslav/OpenAI-sublime-text) (Sublime Text 4 AI assistant plugin with Ollama support)
- [Discord-Ollama Chat Bot](https://github.com/kevinthedang/discord-ollama) (Generalized TypeScript Discord Bot w/ Tuning Documentation)
- [Discord AI chat/moderation bot](https://github.com/rapmd73/Companion) Chat/moderation bot written in Python. Uses Ollama to create personalities.

### Supported backends

- [llama.cpp](https://github.com/ggerganov/llama.cpp) project founded by Georgi Gerganov.

- [Obsidian BMO Chatbot plugin](https://github.com/longy2k/obsidian-bmo-chatbot)
- [Open Interpreter](https://docs.openinterpreter.com/language-model-setup/local-models/ollama)
160
api/client.go
@@ -1,16 +1,3 @@
// Package api implements the client-side API for code wishing to interact
// with the ollama service. The methods of the [Client] type correspond to
// the ollama REST API as described in [the API documentation].
// The ollama command-line client itself uses this package to interact with
// the backend service.
//
// # Examples
//
// Several examples of using this package are available [in the GitHub
// repository].
//
// [the API documentation]: https://github.com/ollama/ollama/blob/main/docs/api.md
// [in the GitHub repository]: https://github.com/ollama/ollama/tree/main/examples
package api

import (
@@ -18,6 +5,7 @@ import (
    "bytes"
    "context"
    "encoding/json"
    "errors"
    "fmt"
    "io"
    "net"
@@ -25,18 +13,15 @@ import (
    "net/url"
    "os"
    "runtime"
    "strconv"
    "strings"

    "github.com/ollama/ollama/format"
    "github.com/ollama/ollama/version"
    "github.com/jmorganca/ollama/format"
    "github.com/jmorganca/ollama/version"
)

// Client encapsulates client state for interacting with the ollama
// service. Use [ClientFromEnvironment] to create new Clients.
type Client struct {
    base *url.URL
    http *http.Client
    http http.Client
}

func checkError(resp *http.Response, body []byte) error {
@@ -55,46 +40,13 @@ func checkError(resp *http.Response, body []byte) error {
    return apiError
}

// ClientFromEnvironment creates a new [Client] using configuration from the
// environment variable OLLAMA_HOST, which points to the network host and
// port on which the ollama service is listening. The format of this variable
// is:
//
//	<scheme>://<host>:<port>
//
// If the variable is not specified, a default ollama host and port will be
// used.
func ClientFromEnvironment() (*Client, error) {
    ollamaHost, err := GetOllamaHost()
    if err != nil {
        return nil, err
    }

    return &Client{
        base: &url.URL{
            Scheme: ollamaHost.Scheme,
            Host:   net.JoinHostPort(ollamaHost.Host, ollamaHost.Port),
        },
        http: http.DefaultClient,
    }, nil
}

type OllamaHost struct {
    Scheme string
    Host   string
    Port   string
}

func GetOllamaHost() (OllamaHost, error) {
    defaultPort := "11434"

    hostVar := os.Getenv("OLLAMA_HOST")
    hostVar = strings.TrimSpace(strings.Trim(strings.TrimSpace(hostVar), "\"'"))

    scheme, hostport, ok := strings.Cut(hostVar, "://")
    scheme, hostport, ok := strings.Cut(os.Getenv("OLLAMA_HOST"), "://")
    switch {
    case !ok:
        scheme, hostport = "http", hostVar
        scheme, hostport = "http", os.Getenv("OLLAMA_HOST")
    case scheme == "http":
        defaultPort = "80"
    case scheme == "https":
@@ -114,22 +66,30 @@ func GetOllamaHost() (OllamaHost, error) {
        }
    }

    if portNum, err := strconv.ParseInt(port, 10, 32); err != nil || portNum > 65535 || portNum < 0 {
        return OllamaHost{}, ErrInvalidHostPort
    client := Client{
        base: &url.URL{
            Scheme: scheme,
            Host:   net.JoinHostPort(host, port),
        },
    }

    return OllamaHost{
        Scheme: scheme,
        Host:   host,
        Port:   port,
    }, nil
}

func NewClient(base *url.URL, http *http.Client) *Client {
    return &Client{
        base: base,
        http: http,
    mockRequest, err := http.NewRequest(http.MethodHead, client.base.String(), nil)
    if err != nil {
        return nil, err
    }

    proxyURL, err := http.ProxyFromEnvironment(mockRequest)
    if err != nil {
        return nil, err
    }

    client.http = http.Client{
        Transport: &http.Transport{
            Proxy: http.ProxyURL(proxyURL),
        },
    }

    return &client, nil
}
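A minimal sketch of using the constructor above from a separate program; it assumes a running server, and `ClientFromEnvironment` honors `OLLAMA_HOST` before falling back to the default host and port:

```
package main

import (
    "context"
    "fmt"

    "github.com/ollama/ollama/api"
)

func main() {
    // Respects OLLAMA_HOST, e.g. "https://example.com:443", else the default.
    client, err := api.ClientFromEnvironment()
    if err != nil {
        panic(err)
    }

    // Version is a cheap way to confirm connectivity to the server.
    version, err := client.Version(context.Background())
    if err != nil {
        panic(err)
    }
    fmt.Println("connected to ollama", version)
}
```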

func (c *Client) do(ctx context.Context, method, path string, reqData, respData any) error {
@@ -248,14 +208,8 @@ func (c *Client) stream(ctx context.Context, method, path string, data any, fn f
    return nil
}

// GenerateResponseFunc is a function that [Client.Generate] invokes every time
// a response is received from the service. If this function returns an error,
// [Client.Generate] will stop generating and return this error.
type GenerateResponseFunc func(GenerateResponse) error

// Generate generates a response for a given prompt. The req parameter should
// be populated with prompt details. fn is called for each response (there may
// be multiple responses, e.g. in case streaming is enabled).
func (c *Client) Generate(ctx context.Context, req *GenerateRequest, fn GenerateResponseFunc) error {
    return c.stream(ctx, http.MethodPost, "/api/generate", req, func(bts []byte) error {
        var resp GenerateResponse
@@ -267,15 +221,8 @@ func (c *Client) Generate(ctx context.Context, req *GenerateRequest, fn Generate
    })
}

// ChatResponseFunc is a function that [Client.Chat] invokes every time
// a response is received from the service. If this function returns an error,
// [Client.Chat] will stop generating and return this error.
type ChatResponseFunc func(ChatResponse) error

// Chat generates the next message in a chat. [ChatRequest] may contain a
// sequence of messages which can be used to maintain chat history with a model.
// fn is called for each response (there may be multiple responses, e.g. in case
// streaming is enabled).
func (c *Client) Chat(ctx context.Context, req *ChatRequest, fn ChatResponseFunc) error {
    return c.stream(ctx, http.MethodPost, "/api/chat", req, func(bts []byte) error {
        var resp ChatResponse
@@ -287,14 +234,8 @@ func (c *Client) Chat(ctx context.Context, req *ChatRequest, fn ChatResponseFunc
    })
}
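To make the callback contract above concrete, a hedged sketch of streaming a completion; the model name is illustrative, and because `Stream` defaults to true, `fn` fires once per streamed chunk:

```
package main

import (
    "context"
    "fmt"

    "github.com/ollama/ollama/api"
)

func main() {
    client, err := api.ClientFromEnvironment()
    if err != nil {
        panic(err)
    }

    req := &api.GenerateRequest{
        Model:  "llama2", // illustrative model name
        Prompt: "Why is the sky blue?",
    }

    // fn is invoked for every streamed chunk; returning an error stops generation.
    err = client.Generate(context.Background(), req, func(resp api.GenerateResponse) error {
        fmt.Print(resp.Response)
        return nil
    })
    if err != nil {
        panic(err)
    }
    fmt.Println()
}
```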

// PullProgressFunc is a function that [Client.Pull] invokes every time there
// is progress with a "pull" request sent to the service. If this function
// returns an error, [Client.Pull] will stop the process and return this error.
type PullProgressFunc func(ProgressResponse) error

// Pull downloads a model from the ollama library. fn is called each time
// progress is made on the request and can be used to display a progress bar,
// etc.
func (c *Client) Pull(ctx context.Context, req *PullRequest, fn PullProgressFunc) error {
    return c.stream(ctx, http.MethodPost, "/api/pull", req, func(bts []byte) error {
        var resp ProgressResponse
@@ -306,14 +247,8 @@ func (c *Client) Pull(ctx context.Context, req *PullRequest, fn PullProgressFunc
    })
}

// PushProgressFunc is a function that [Client.Push] invokes when progress is
// made.
// It's similar to other progress function types like [PullProgressFunc].
type PushProgressFunc func(ProgressResponse) error

// Push uploads a model to the model library; requires registering for ollama.ai
// and adding a public key first. fn is called each time progress is made on
// the request and can be used to display a progress bar, etc.
func (c *Client) Push(ctx context.Context, req *PushRequest, fn PushProgressFunc) error {
    return c.stream(ctx, http.MethodPost, "/api/push", req, func(bts []byte) error {
        var resp ProgressResponse
@@ -325,15 +260,8 @@ func (c *Client) Push(ctx context.Context, req *PushRequest, fn PushProgressFunc
    })
}

// CreateProgressFunc is a function that [Client.Create] invokes when progress
// is made.
// It's similar to other progress function types like [PullProgressFunc].
type CreateProgressFunc func(ProgressResponse) error

// Create creates a model from a [Modelfile]. fn is a progress function that
// behaves similarly to other methods (see [Client.Pull]).
//
// [Modelfile]: https://github.com/ollama/ollama/blob/main/docs/modelfile.md
func (c *Client) Create(ctx context.Context, req *CreateRequest, fn CreateProgressFunc) error {
    return c.stream(ctx, http.MethodPost, "/api/create", req, func(bts []byte) error {
        var resp ProgressResponse
@@ -345,7 +273,6 @@ func (c *Client) Create(ctx context.Context, req *CreateRequest, fn CreateProgre
    })
}

// List lists models that are available locally.
func (c *Client) List(ctx context.Context) (*ListResponse, error) {
    var lr ListResponse
    if err := c.do(ctx, http.MethodGet, "/api/tags", nil, &lr); err != nil {
@@ -354,17 +281,6 @@ func (c *Client) List(ctx context.Context) (*ListResponse, error) {
    return &lr, nil
}

// List running models.
func (c *Client) ListRunning(ctx context.Context) (*ListResponse, error) {
    var lr ListResponse
    if err := c.do(ctx, http.MethodGet, "/api/ps", nil, &lr); err != nil {
        return nil, err
    }
    return &lr, nil
}

// Copy copies a model - creating a model with another name from an existing
// model.
func (c *Client) Copy(ctx context.Context, req *CopyRequest) error {
    if err := c.do(ctx, http.MethodPost, "/api/copy", req, nil); err != nil {
        return err
@@ -372,7 +288,6 @@ func (c *Client) Copy(ctx context.Context, req *CopyRequest) error {
    return nil
}

// Delete deletes a model and its data.
func (c *Client) Delete(ctx context.Context, req *DeleteRequest) error {
    if err := c.do(ctx, http.MethodDelete, "/api/delete", req, nil); err != nil {
        return err
@@ -380,7 +295,6 @@ func (c *Client) Delete(ctx context.Context, req *DeleteRequest) error {
    return nil
}

// Show obtains model information, including details, modelfile, license etc.
func (c *Client) Show(ctx context.Context, req *ShowRequest) (*ShowResponse, error) {
    var resp ShowResponse
    if err := c.do(ctx, http.MethodPost, "/api/show", req, &resp); err != nil {
@@ -389,16 +303,12 @@ func (c *Client) Show(ctx context.Context, req *ShowRequest) (*ShowResponse, err
    return &resp, nil
}

// Heartbeat checks if the server has started and is responsive; if yes, it
// returns nil, otherwise an error.
func (c *Client) Heartbeat(ctx context.Context) error {
    if err := c.do(ctx, http.MethodHead, "/", nil, nil); err != nil {
        return err
    }
    return nil
}

// Embeddings generates embeddings from a model.
func (c *Client) Embeddings(ctx context.Context, req *EmbeddingRequest) (*EmbeddingResponse, error) {
    var resp EmbeddingResponse
    if err := c.do(ctx, http.MethodPost, "/api/embeddings", req, &resp); err != nil {
@@ -407,13 +317,21 @@ func (c *Client) Embeddings(ctx context.Context, req *EmbeddingRequest) (*Embedd
    return &resp, nil
}

// CreateBlob creates a blob from a file on the server. digest is the
// expected SHA256 digest of the file, and r represents the file.
func (c *Client) CreateBlob(ctx context.Context, digest string, r io.Reader) error {
    return c.do(ctx, http.MethodPost, fmt.Sprintf("/api/blobs/%s", digest), r, nil)
    if err := c.do(ctx, http.MethodHead, fmt.Sprintf("/api/blobs/%s", digest), nil, nil); err != nil {
        var statusError StatusError
        if !errors.As(err, &statusError) || statusError.StatusCode != http.StatusNotFound {
            return err
        }

        if err := c.do(ctx, http.MethodPost, fmt.Sprintf("/api/blobs/%s", digest), r, nil); err != nil {
            return err
        }
    }

    return nil
}
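The digest parameter is assumed here to take the `sha256:<hex>` form used by the blob endpoints in the API documentation; a sketch that hashes a local file (the file name is illustrative) before uploading it:

```
package main

import (
    "context"
    "crypto/sha256"
    "fmt"
    "io"
    "os"

    "github.com/ollama/ollama/api"
)

func main() {
    f, err := os.Open("model.gguf") // illustrative file name
    if err != nil {
        panic(err)
    }
    defer f.Close()

    // Hash the file first; the server verifies the upload against this digest.
    h := sha256.New()
    if _, err := io.Copy(h, f); err != nil {
        panic(err)
    }
    digest := fmt.Sprintf("sha256:%x", h.Sum(nil)) // assumed digest format

    // Rewind so CreateBlob streams the file from the beginning.
    if _, err := f.Seek(0, io.SeekStart); err != nil {
        panic(err)
    }

    client, err := api.ClientFromEnvironment()
    if err != nil {
        panic(err)
    }
    if err := client.CreateBlob(context.Background(), digest, f); err != nil {
        panic(err)
    }
}
```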

// Version returns the Ollama server version as a string.
func (c *Client) Version(ctx context.Context) (string, error) {
    var version struct {
        Version string `json:"version"`

@@ -1,12 +1,6 @@
package api

import (
    "fmt"
    "net"
    "testing"

    "github.com/stretchr/testify/assert"
)
import "testing"

func TestClientFromEnvironment(t *testing.T) {
    type testCase struct {
@@ -46,40 +40,4 @@ func TestClientFromEnvironment(t *testing.T) {
        }
        })
    }

    hostTestCases := map[string]*testCase{
        "empty":               {value: "", expect: "127.0.0.1:11434"},
        "only address":        {value: "1.2.3.4", expect: "1.2.3.4:11434"},
        "only port":           {value: ":1234", expect: ":1234"},
        "address and port":    {value: "1.2.3.4:1234", expect: "1.2.3.4:1234"},
        "hostname":            {value: "example.com", expect: "example.com:11434"},
        "hostname and port":   {value: "example.com:1234", expect: "example.com:1234"},
        "zero port":           {value: ":0", expect: ":0"},
        "too large port":      {value: ":66000", err: ErrInvalidHostPort},
        "too small port":      {value: ":-1", err: ErrInvalidHostPort},
        "ipv6 localhost":      {value: "[::1]", expect: "[::1]:11434"},
        "ipv6 world open":     {value: "[::]", expect: "[::]:11434"},
        "ipv6 no brackets":    {value: "::1", expect: "[::1]:11434"},
        "ipv6 + port":         {value: "[::1]:1337", expect: "[::1]:1337"},
        "extra space":         {value: " 1.2.3.4 ", expect: "1.2.3.4:11434"},
        "extra quotes":        {value: "\"1.2.3.4\"", expect: "1.2.3.4:11434"},
        "extra space+quotes":  {value: " \" 1.2.3.4 \" ", expect: "1.2.3.4:11434"},
        "extra single quotes": {value: "'1.2.3.4'", expect: "1.2.3.4:11434"},
    }

    for k, v := range hostTestCases {
        t.Run(k, func(t *testing.T) {
            t.Setenv("OLLAMA_HOST", v.value)

            oh, err := GetOllamaHost()
            if err != v.err {
                t.Fatalf("expected %s, got %s", v.err, err)
            }

            if err == nil {
                host := net.JoinHostPort(oh.Host, oh.Port)
                assert.Equal(t, v.expect, host, fmt.Sprintf("%s: expected %s, got %s", k, v.expect, host))
            }
        })
    }
}

334
api/types.go
@@ -2,9 +2,7 @@ package api

import (
    "encoding/json"
    "errors"
    "fmt"
    "log/slog"
    "math"
    "os"
    "reflect"
@@ -13,7 +11,6 @@ import (
    "time"
)

// StatusError is an error with an HTTP status code.
type StatusError struct {
    StatusCode int
    Status     string
@@ -34,90 +31,41 @@ func (e StatusError) Error() string {
    }
}

// ImageData represents the raw binary data of an image file.
type ImageData []byte

// GenerateRequest describes a request sent by [Client.Generate]. While you
// have to specify the Model and Prompt fields, all the other fields have
// reasonable defaults for basic uses.
type GenerateRequest struct {
    // Model is the model name; it should be a name familiar to Ollama from
    // the library at https://ollama.com/library
    Model string `json:"model"`
    Model    string      `json:"model"`
    Prompt   string      `json:"prompt"`
    System   string      `json:"system"`
    Template string      `json:"template"`
    Context  []int       `json:"context,omitempty"`
    Stream   *bool       `json:"stream,omitempty"`
    Raw      bool        `json:"raw,omitempty"`
    Format   string      `json:"format"`
    Images   []ImageData `json:"images,omitempty"`

    // Prompt is the textual prompt to send to the model.
    Prompt string `json:"prompt"`

    // System overrides the model's default system message/prompt.
    System string `json:"system"`

    // Template overrides the model's default prompt template.
    Template string `json:"template"`

    // Context is the context parameter returned from a previous call to
    // Generate. It can be used to keep a short conversational memory.
    Context []int `json:"context,omitempty"`

    // Stream specifies whether the response is streaming; it is true by default.
    Stream *bool `json:"stream,omitempty"`

    // Raw set to true means that no formatting will be applied to the prompt.
    Raw bool `json:"raw,omitempty"`

    // Format specifies the format to return a response in.
    Format string `json:"format"`

    // KeepAlive controls how long the model will stay loaded in memory following
    // this request.
    KeepAlive *Duration `json:"keep_alive,omitempty"`

    // Images is an optional list of base64-encoded images accompanying this
    // request, for multimodal models.
    Images []ImageData `json:"images,omitempty"`

    // Options lists model-specific options. For example, temperature can be
    // set through this field, if the model supports it.
    Options map[string]interface{} `json:"options"`
}

// ChatRequest describes a request sent by [Client.Chat].
type ChatRequest struct {
    // Model is the model name, as in [GenerateRequest].
    Model string `json:"model"`

    // Messages is the messages of the chat - can be used to keep a chat memory.
    Model    string    `json:"model"`
    Messages []Message `json:"messages"`
    Stream   *bool     `json:"stream,omitempty"`
    Format   string    `json:"format"`

    // Stream enables streaming of the returned response; true by default.
    Stream *bool `json:"stream,omitempty"`

    // Format is the format to return the response in (e.g. "json").
    Format string `json:"format"`

    // KeepAlive controls how long the model will stay loaded into memory
    // following the request.
    KeepAlive *Duration `json:"keep_alive,omitempty"`

    // Options lists model-specific options.
    Options map[string]interface{} `json:"options"`
}

// Message is a single message in a chat sequence. The message contains the
// role ("system", "user", or "assistant"), the content and an optional list
// of images.
type Message struct {
    Role    string `json:"role"`
    Role    string      `json:"role"` // one of ["system", "user", "assistant"]
    Content string      `json:"content"`
    Images  []ImageData `json:"images,omitempty"`
}
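Putting `ChatRequest` and `Message` together, a hedged sketch of a chat call that carries history in `Messages`; the model name and message content are illustrative:

```
package main

import (
    "context"
    "fmt"

    "github.com/ollama/ollama/api"
)

func main() {
    client, err := api.ClientFromEnvironment()
    if err != nil {
        panic(err)
    }

    req := &api.ChatRequest{
        Model: "mistral", // illustrative model name
        // Earlier turns can be appended here to maintain chat history.
        Messages: []api.Message{
            {Role: "system", Content: "You are a concise assistant."},
            {Role: "user", Content: "why is the sky blue?"},
        },
    }

    err = client.Chat(context.Background(), req, func(resp api.ChatResponse) error {
        fmt.Print(resp.Message.Content)
        return nil
    })
    if err != nil {
        panic(err)
    }
    fmt.Println()
}
```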

// ChatResponse is the response returned by [Client.Chat]. Its fields are
// similar to [GenerateResponse].
type ChatResponse struct {
    Model      string    `json:"model"`
    CreatedAt  time.Time `json:"created_at"`
    Message    Message   `json:"message"`
    DoneReason string    `json:"done_reason,omitempty"`
    Model     string    `json:"model"`
    CreatedAt time.Time `json:"created_at"`
    Message   Message   `json:"message"`

    Done bool `json:"done"`

@@ -133,8 +81,7 @@ type Metrics struct {
    EvalDuration time.Duration `json:"eval_duration,omitempty"`
}

// Options specified in [GenerateRequest], if you add a new option here add it
// to the API docs also.
// Options specfied in GenerateRequest, if you add a new option here add it to the API docs also
type Options struct {
    Runner

@@ -160,57 +107,45 @@ type Options struct {

// Runner options which must be set when the model is loaded into memory
type Runner struct {
    UseNUMA   bool `json:"numa,omitempty"`
    NumCtx    int  `json:"num_ctx,omitempty"`
    NumBatch  int  `json:"num_batch,omitempty"`
    NumGPU    int  `json:"num_gpu,omitempty"`
    MainGPU   int  `json:"main_gpu,omitempty"`
    LowVRAM   bool `json:"low_vram,omitempty"`
    F16KV     bool `json:"f16_kv,omitempty"`
    LogitsAll bool `json:"logits_all,omitempty"`
    VocabOnly bool `json:"vocab_only,omitempty"`
    UseMMap   bool `json:"use_mmap,omitempty"`
    UseMLock  bool `json:"use_mlock,omitempty"`
    NumThread int  `json:"num_thread,omitempty"`
    UseNUMA            bool    `json:"numa,omitempty"`
    NumCtx             int     `json:"num_ctx,omitempty"`
    NumBatch           int     `json:"num_batch,omitempty"`
    NumGQA             int     `json:"num_gqa,omitempty"`
    NumGPU             int     `json:"num_gpu,omitempty"`
    MainGPU            int     `json:"main_gpu,omitempty"`
    LowVRAM            bool    `json:"low_vram,omitempty"`
    F16KV              bool    `json:"f16_kv,omitempty"`
    LogitsAll          bool    `json:"logits_all,omitempty"`
    VocabOnly          bool    `json:"vocab_only,omitempty"`
    UseMMap            bool    `json:"use_mmap,omitempty"`
    UseMLock           bool    `json:"use_mlock,omitempty"`
    EmbeddingOnly      bool    `json:"embedding_only,omitempty"`
    RopeFrequencyBase  float32 `json:"rope_frequency_base,omitempty"`
    RopeFrequencyScale float32 `json:"rope_frequency_scale,omitempty"`
    NumThread          int     `json:"num_thread,omitempty"`
}

// EmbeddingRequest is the request passed to [Client.Embeddings].
type EmbeddingRequest struct {
    // Model is the model name.
    Model string `json:"model"`

    // Prompt is the textual prompt to embed.
    Model  string `json:"model"`
    Prompt string `json:"prompt"`

    // KeepAlive controls how long the model will stay loaded in memory following
    // this request.
    KeepAlive *Duration `json:"keep_alive,omitempty"`

    // Options lists model-specific options.
    Options map[string]interface{} `json:"options"`
}

// EmbeddingResponse is the response from [Client.Embeddings].
type EmbeddingResponse struct {
    Embedding []float64 `json:"embedding"`
}
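A short sketch of these embedding types in use (model name illustrative):

```
package main

import (
    "context"
    "fmt"

    "github.com/ollama/ollama/api"
)

func main() {
    client, err := api.ClientFromEnvironment()
    if err != nil {
        panic(err)
    }

    resp, err := client.Embeddings(context.Background(), &api.EmbeddingRequest{
        Model:  "llama2", // illustrative model name
        Prompt: "the quick brown fox",
    })
    if err != nil {
        panic(err)
    }
    fmt.Println("embedding dimensions:", len(resp.Embedding))
}
```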

// CreateRequest is the request passed to [Client.Create].
type CreateRequest struct {
    Model     string `json:"model"`
    Path      string `json:"path"`
    Modelfile string `json:"modelfile"`
    Stream    *bool  `json:"stream,omitempty"`
    Quantize  string `json:"quantize,omitempty"`

    // Name is deprecated, see Model
    Name string `json:"name"`

    // Quantization is deprecated, see Quantize
    Quantization string `json:"quantization,omitempty"`
}

// DeleteRequest is the request passed to [Client.Delete].
type DeleteRequest struct {
    Model string `json:"model"`

@@ -218,7 +153,6 @@ type DeleteRequest struct {
    Name string `json:"name"`
}

// ShowRequest is the request passed to [Client.Show].
type ShowRequest struct {
    Model  string `json:"model"`
    System string `json:"system"`
@@ -230,7 +164,6 @@ type ShowRequest struct {
    Name string `json:"name"`
}

// ShowResponse is the response returned from [Client.Show].
type ShowResponse struct {
    License   string `json:"license,omitempty"`
    Modelfile string `json:"modelfile,omitempty"`
@@ -238,16 +171,13 @@ type ShowResponse struct {
    Template string       `json:"template,omitempty"`
    System   string       `json:"system,omitempty"`
    Details  ModelDetails `json:"details,omitempty"`
    Messages []Message    `json:"messages,omitempty"`
}

// CopyRequest is the request passed to [Client.Copy].
type CopyRequest struct {
    Source      string `json:"source"`
    Destination string `json:"destination"`
}

// PullRequest is the request passed to [Client.Pull].
type PullRequest struct {
    Model    string `json:"model"`
    Insecure bool   `json:"insecure,omitempty"`
@@ -259,8 +189,6 @@ type PullRequest struct {
    Name string `json:"name"`
}

// ProgressResponse is the response passed to progress functions like
// [PullProgressFunc] and [PushProgressFunc].
type ProgressResponse struct {
    Status string `json:"status"`
    Digest string `json:"digest,omitempty"`
@@ -268,7 +196,6 @@ type ProgressResponse struct {
    Completed int64 `json:"completed,omitempty"`
}

// PushRequest is the request passed to [Client.Push].
type PushRequest struct {
    Model    string `json:"model"`
    Insecure bool   `json:"insecure,omitempty"`
@@ -280,54 +207,35 @@ type PushRequest struct {
    Name string `json:"name"`
}

// ListResponse is the response from [Client.List].
type ListResponse struct {
    Models []ModelResponse `json:"models"`
}

// ModelResponse is a single model description in [ListResponse].
type ModelResponse struct {
    Name       string       `json:"name"`
    Model      string       `json:"model"`
    ModifiedAt time.Time    `json:"modified_at,omitempty"`
    ModifiedAt time.Time    `json:"modified_at"`
    Size       int64        `json:"size"`
    Digest     string       `json:"digest"`
    Details    ModelDetails `json:"details,omitempty"`
    ExpiresAt  time.Time    `json:"expires_at,omitempty"`
    SizeVRAM   int64        `json:"size_vram,omitempty"`
}

type TokenResponse struct {
    Token string `json:"token"`
}

// GenerateResponse is the response passed into [GenerateResponseFunc].
type GenerateResponse struct {
    // Model is the model name that generated the response.
    Model string `json:"model"`

    // CreatedAt is the timestamp of the response.
    Model     string    `json:"model"`
    CreatedAt time.Time `json:"created_at"`
    Response  string    `json:"response"`

    // Response is the textual response itself.
    Response string `json:"response"`

    // Done specifies if the response is complete.
    Done bool `json:"done"`

    // DoneReason is the reason the model stopped generating text.
    DoneReason string `json:"done_reason,omitempty"`

    // Context is an encoding of the conversation used in this response; this
    // can be sent in the next request to keep a conversational memory.
    Done    bool  `json:"done"`
    Context []int `json:"context,omitempty"`

    Metrics
}
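The `Context` field is what links one `Generate` call to the next: feeding the returned slice back into the following request keeps a short conversational memory across otherwise stateless calls. A hedged sketch:

```
package main

import (
    "context"
    "fmt"

    "github.com/ollama/ollama/api"
)

func main() {
    client, err := api.ClientFromEnvironment()
    if err != nil {
        panic(err)
    }

    var memory []int // the Context returned by the previous turn

    ask := func(prompt string) {
        req := &api.GenerateRequest{
            Model:   "llama2", // illustrative model name
            Prompt:  prompt,
            Context: memory, // replay the prior conversation encoding
        }
        err := client.Generate(context.Background(), req, func(r api.GenerateResponse) error {
            fmt.Print(r.Response)
            if r.Done {
                memory = r.Context // carry the conversation forward
            }
            return nil
        })
        if err != nil {
            panic(err)
        }
        fmt.Println()
    }

    ask("My name is Ada.")
    ask("What is my name?")
}
```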

// ModelDetails provides details about a model.
type ModelDetails struct {
    ParentModel string   `json:"parent_model"`
    Format      string   `json:"format"`
    Family      string   `json:"family"`
    Families    []string `json:"families"`
@@ -363,7 +271,7 @@ func (m *Metrics) Summary() {
    }
}

var ErrInvalidHostPort = errors.New("invalid port specified in OLLAMA_HOST")
var ErrInvalidOpts = fmt.Errorf("invalid options")

func (opts *Options) FromMap(m map[string]interface{}) error {
    valueOpts := reflect.ValueOf(opts).Elem() // names of the fields in the options struct
@@ -378,83 +286,81 @@ func (opts *Options) FromMap(m map[string]interface{}) error {
        }
    }

    invalidOpts := []string{}
    for key, val := range m {
        opt, ok := jsonOpts[key]
        if !ok {
            slog.Warn("invalid option provided", "option", opt.Name)
            continue
        }
        if opt, ok := jsonOpts[key]; ok {
            field := valueOpts.FieldByName(opt.Name)
            if field.IsValid() && field.CanSet() {
                if val == nil {
                    continue
                }

        field := valueOpts.FieldByName(opt.Name)
        if field.IsValid() && field.CanSet() {
            if val == nil {
                continue
            }

            switch field.Kind() {
            case reflect.Int:
                switch t := val.(type) {
                case int64:
                    field.SetInt(t)
                case float64:
                    // when JSON unmarshals numbers, it uses float64, not int
                    field.SetInt(int64(t))
                default:
                    return fmt.Errorf("option %q must be of type integer", key)
                }
            case reflect.Bool:
                val, ok := val.(bool)
                if !ok {
                    return fmt.Errorf("option %q must be of type boolean", key)
                }
                field.SetBool(val)
            case reflect.Float32:
                // JSON unmarshals to float64
                val, ok := val.(float64)
                if !ok {
                    return fmt.Errorf("option %q must be of type float32", key)
                }
                field.SetFloat(val)
            case reflect.String:
                val, ok := val.(string)
                if !ok {
                    return fmt.Errorf("option %q must be of type string", key)
                }
                field.SetString(val)
            case reflect.Slice:
                // JSON unmarshals to []interface{}, not []string
                val, ok := val.([]interface{})
                if !ok {
                    return fmt.Errorf("option %q must be of type array", key)
                }
                // convert []interface{} to []string
                slice := make([]string, len(val))
                for i, item := range val {
                    str, ok := item.(string)
                    if !ok {
                        return fmt.Errorf("option %q must be of an array of strings", key)
                switch field.Kind() {
                case reflect.Int:
                    switch t := val.(type) {
                    case int64:
                        field.SetInt(t)
                    case float64:
                        // when JSON unmarshals numbers, it uses float64, not int
                        field.SetInt(int64(t))
                    default:
                        return fmt.Errorf("option %q must be of type integer", key)
                    }
                    slice[i] = str
                case reflect.Bool:
                    val, ok := val.(bool)
                    if !ok {
                        return fmt.Errorf("option %q must be of type boolean", key)
                    }
                    field.SetBool(val)
                case reflect.Float32:
                    // JSON unmarshals to float64
                    val, ok := val.(float64)
                    if !ok {
                        return fmt.Errorf("option %q must be of type float32", key)
                    }
                    field.SetFloat(val)
                case reflect.String:
                    val, ok := val.(string)
                    if !ok {
                        return fmt.Errorf("option %q must be of type string", key)
                    }
                    field.SetString(val)
                case reflect.Slice:
                    // JSON unmarshals to []interface{}, not []string
                    val, ok := val.([]interface{})
                    if !ok {
                        return fmt.Errorf("option %q must be of type array", key)
                    }
                    // convert []interface{} to []string
                    slice := make([]string, len(val))
                    for i, item := range val {
                        str, ok := item.(string)
                        if !ok {
                            return fmt.Errorf("option %q must be of an array of strings", key)
                        }
                        slice[i] = str
                    }
                    field.Set(reflect.ValueOf(slice))
                default:
                    return fmt.Errorf("unknown type loading config params: %v", field.Kind())
                }
                field.Set(reflect.ValueOf(slice))
            default:
                return fmt.Errorf("unknown type loading config params: %v", field.Kind())
            }
        } else {
            invalidOpts = append(invalidOpts, key)
        }
    }

    if len(invalidOpts) > 0 {
        return fmt.Errorf("%w: %v", ErrInvalidOpts, strings.Join(invalidOpts, ", "))
    }
    return nil
}
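A sketch of how `FromMap` is typically fed, assuming the elided code maps JSON tag names such as `temperature` and `num_ctx` to their struct fields; note that JSON-decoded numbers arrive as `float64`, which both the `Int` and `Float32` branches above accept:

```
package main

import (
    "fmt"

    "github.com/ollama/ollama/api"
)

func main() {
    opts := api.DefaultOptions()

    // A map as it would look after json.Unmarshal of a request body.
    err := opts.FromMap(map[string]interface{}{
        "temperature": 0.2,            // float64 -> Float32 field
        "num_ctx":     float64(4096),  // float64 -> Int field
    })
    if err != nil {
        panic(err)
    }
    fmt.Printf("num_ctx=%d temperature=%v\n", opts.NumCtx, opts.Temperature)
}
```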

// DefaultOptions is the default set of options for [GenerateRequest]; these
// values are used unless the user specifies other values explicitly.
func DefaultOptions() Options {
    return Options{
        // options set on request to runner
        NumPredict: -1,

        // set a minimal num_keep to avoid issues on context shifts
        NumKeep: 4,
        NumPredict:  -1,
        NumKeep:     0,
        Temperature: 0.8,
        TopK:        40,
        TopP:        0.9,
@@ -472,15 +378,19 @@ func DefaultOptions() Options {

        Runner: Runner{
            // options set when the model is loaded
            NumCtx:    2048,
            NumBatch:  512,
            NumGPU:    -1, // -1 here indicates that NumGPU should be set dynamically
            NumThread: 0,  // let the runtime decide
            LowVRAM:   false,
            F16KV:     true,
            UseMLock:  false,
            UseMMap:   true,
            UseNUMA:   false,
            NumCtx:             2048,
            RopeFrequencyBase:  10000.0,
            RopeFrequencyScale: 1.0,
            NumBatch:           512,
            NumGPU:             -1, // -1 here indicates that NumGPU should be set dynamically
            NumGQA:             1,
            NumThread:          0, // let the runtime decide
            LowVRAM:            false,
            F16KV:              true,
            UseMLock:           false,
            UseMMap:            true,
            UseNUMA:            false,
            EmbeddingOnly:      true,
        },
    }
}

@@ -489,13 +399,6 @@ type Duration struct {
    time.Duration
}

func (d Duration) MarshalJSON() ([]byte, error) {
    if d.Duration < 0 {
        return []byte("-1"), nil
    }
    return []byte("\"" + d.Duration.String() + "\""), nil
}

func (d *Duration) UnmarshalJSON(b []byte) (err error) {
    var v any
    if err := json.Unmarshal(b, &v); err != nil {
@@ -507,20 +410,15 @@ func (d *Duration) UnmarshalJSON(b []byte) (err error) {
    switch t := v.(type) {
    case float64:
        if t < 0 {
            d.Duration = time.Duration(math.MaxInt64)
        } else {
            d.Duration = time.Duration(int(t) * int(time.Second))
            t = math.MaxFloat64
        }

        d.Duration = time.Duration(t)
    case string:
        d.Duration, err = time.ParseDuration(t)
        if err != nil {
            return err
        }
        if d.Duration < 0 {
            d.Duration = time.Duration(math.MaxInt64)
        }
    default:
        return fmt.Errorf("Unsupported type: '%s'", reflect.TypeOf(v))
    }

    return nil

@@ -1,107 +0,0 @@
package api

import (
    "encoding/json"
    "math"
    "testing"
    "time"

    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
)

func TestKeepAliveParsingFromJSON(t *testing.T) {
    tests := []struct {
        name string
        req  string
        exp  *Duration
    }{
        {
            name: "Positive Integer",
            req:  `{ "keep_alive": 42 }`,
            exp:  &Duration{42 * time.Second},
        },
        {
            name: "Positive Float",
            req:  `{ "keep_alive": 42.5 }`,
            exp:  &Duration{42 * time.Second},
        },
        {
            name: "Positive Integer String",
            req:  `{ "keep_alive": "42m" }`,
            exp:  &Duration{42 * time.Minute},
        },
        {
            name: "Negative Integer",
            req:  `{ "keep_alive": -1 }`,
            exp:  &Duration{math.MaxInt64},
        },
        {
            name: "Negative Float",
            req:  `{ "keep_alive": -3.14 }`,
            exp:  &Duration{math.MaxInt64},
        },
        {
            name: "Negative Integer String",
            req:  `{ "keep_alive": "-1m" }`,
            exp:  &Duration{math.MaxInt64},
        },
    }

    for _, test := range tests {
        t.Run(test.name, func(t *testing.T) {
            var dec ChatRequest
            err := json.Unmarshal([]byte(test.req), &dec)
            require.NoError(t, err)

            assert.Equal(t, test.exp, dec.KeepAlive)
        })
    }
}

func TestDurationMarshalUnmarshal(t *testing.T) {
    tests := []struct {
        name     string
        input    time.Duration
        expected time.Duration
    }{
        {
            "negative duration",
            time.Duration(-1),
            time.Duration(math.MaxInt64),
        },
        {
            "positive duration",
            time.Duration(42 * time.Second),
            time.Duration(42 * time.Second),
        },
        {
            "another positive duration",
            time.Duration(42 * time.Minute),
            time.Duration(42 * time.Minute),
        },
        {
            "zero duration",
            time.Duration(0),
            time.Duration(0),
        },
        {
            "max duration",
            time.Duration(math.MaxInt64),
            time.Duration(math.MaxInt64),
        },
    }

    for _, test := range tests {
        t.Run(test.name, func(t *testing.T) {
            b, err := json.Marshal(Duration{test.input})
            require.NoError(t, err)

            var d Duration
            err = json.Unmarshal(b, &d)
            require.NoError(t, err)

            assert.Equal(t, test.expected, d.Duration, "input %v, marshalled %v, got %v", test.input, string(b), d.Duration)
        })
    }
}

93
app/.gitignore
vendored
@@ -1 +1,92 @@
ollama.syso
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*

# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json

# Runtime data
pids
*.pid
*.seed
*.pid.lock
.DS_Store

# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov

# Coverage directory used by tools like istanbul
coverage
*.lcov

# nyc test coverage
.nyc_output

# node-waf configuration
.lock-wscript

# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release

# Dependency directories
node_modules/
jspm_packages/

# TypeScript v1 declaration files
typings/

# TypeScript cache
*.tsbuildinfo

# Optional npm cache directory
.npm

# Optional eslint cache
.eslintcache

# Optional REPL history
.node_repl_history

# Output of 'npm pack'
*.tgz

# Yarn Integrity file
.yarn-integrity

# dotenv environment variables file
.env
.env.test

# parcel-bundler cache (https://parceljs.org/)
.cache

# next.js build output
.next

# nuxt.js build output
.nuxt

# vuepress build output
.vuepress/dist

# Serverless directories
.serverless/

# FuseBox cache
.fusebox/

# DynamoDB Local files
.dynamodb/

# Webpack
.webpack/

# Vite
.vite/

# Electron-Forge
out/

@@ -1,22 +1,21 @@
# Ollama App
# Desktop

## Linux
This app builds upon Ollama to provide a desktop experience for running models.

TODO
## Developing

## MacOS

TODO

## Windows

If you want to build the installer, you'll need to install
- https://jrsoftware.org/isinfo.php

In the top directory of this repo, run the following PowerShell script
to build the ollama CLI, ollama app, and ollama installer.
First, build the `ollama` binary:

```
powershell -ExecutionPolicy Bypass -File .\scripts\build_windows.ps1
cd ..
go build .
```

Then run the desktop app with `npm start`:

```
cd app
npm install
npm start
```

(binary image removed; was 7.3 KiB)
@@ -1,17 +0,0 @@
package assets

import (
    "embed"
    "io/fs"
)

//go:embed *.ico
var icons embed.FS

func ListIcons() ([]string, error) {
    return fs.Glob(icons, "*")
}

func GetIcon(filename string) ([]byte, error) {
    return icons.ReadFile(filename)
}

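The deleted assets package above is a compact instance of Go's `embed` facility; the sketch below restates the pattern with placeholder paths, and only compiles if an `assets` directory with matching `.ico` files exists at build time:

```
package main

import (
    "embed"
    "fmt"
    "io/fs"
)

//go:embed assets/*.ico
var icons embed.FS // matching files are baked into the binary at build time

func main() {
    // Enumerate everything that matched the embed pattern.
    names, err := fs.Glob(icons, "assets/*")
    if err != nil {
        panic(err)
    }
    for _, name := range names {
        data, err := icons.ReadFile(name)
        if err != nil {
            panic(err)
        }
        fmt.Printf("%s: %d bytes\n", name, len(data))
    }
}
```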
(binary image diffs: eight images modified with unchanged sizes, 402 B to 891 B; three images removed: 76 KiB, 89 KiB, 91 KiB)
@@ -1,9 +0,0 @@
//go:build !windows

package lifecycle

import "fmt"

func GetStarted() error {
    return fmt.Errorf("GetStarted not implemented")
}
@@ -1,44 +0,0 @@
package lifecycle

import (
    "fmt"
    "log/slog"
    "os"
    "os/exec"
    "path/filepath"
    "syscall"
)

func GetStarted() error {
    const CREATE_NEW_CONSOLE = 0x00000010
    var err error
    bannerScript := filepath.Join(AppDir, "ollama_welcome.ps1")
    args := []string{
        // TODO once we're signed, the execution policy bypass should be removed
        "powershell", "-noexit", "-ExecutionPolicy", "Bypass", "-nologo", "-file", bannerScript,
    }
    args[0], err = exec.LookPath(args[0])
    if err != nil {
        return err
    }

    // Make sure the script actually exists
    _, err = os.Stat(bannerScript)
    if err != nil {
        return fmt.Errorf("getting started banner script error %s", err)
    }

    slog.Info(fmt.Sprintf("opening getting started terminal with %v", args))
    attrs := &os.ProcAttr{
        Files: []*os.File{os.Stdin, os.Stdout, os.Stderr},
        Sys:   &syscall.SysProcAttr{CreationFlags: CREATE_NEW_CONSOLE, HideWindow: false},
    }
    proc, err := os.StartProcess(args[0], args, attrs)

    if err != nil {
        return fmt.Errorf("unable to start getting started shell %w", err)
    }

    slog.Debug(fmt.Sprintf("getting started terminal PID: %d", proc.Pid))
    return proc.Release()
}
@@ -1,92 +0,0 @@
package lifecycle

import (
    "context"
    "fmt"
    "log"
    "log/slog"
    "os"
    "os/signal"
    "syscall"

    "github.com/ollama/ollama/app/store"
    "github.com/ollama/ollama/app/tray"
)

func Run() {
    InitLogging()

    ctx, cancel := context.WithCancel(context.Background())
    var done chan int

    t, err := tray.NewTray()
    if err != nil {
        log.Fatalf("Failed to start: %s", err)
    }
    callbacks := t.GetCallbacks()

    signals := make(chan os.Signal, 1)
    signal.Notify(signals, syscall.SIGINT, syscall.SIGTERM)

    go func() {
        slog.Debug("starting callback loop")
        for {
            select {
            case <-callbacks.Quit:
                slog.Debug("quit called")
                t.Quit()
            case <-signals:
                slog.Debug("shutting down due to signal")
                t.Quit()
            case <-callbacks.Update:
                err := DoUpgrade(cancel, done)
                if err != nil {
                    slog.Warn(fmt.Sprintf("upgrade attempt failed: %s", err))
                }
            case <-callbacks.ShowLogs:
                ShowLogs()
            case <-callbacks.DoFirstUse:
                err := GetStarted()
                if err != nil {
                    slog.Warn(fmt.Sprintf("Failed to launch getting started shell: %s", err))
                }
            }
        }
    }()

    // Are we first use?
    if !store.GetFirstTimeRun() {
        slog.Debug("First time run")
        err = t.DisplayFirstUseNotification()
        if err != nil {
            slog.Debug(fmt.Sprintf("XXX failed to display first use notification %v", err))
        }
        store.SetFirstTimeRun(true)
    } else {
        slog.Debug("Not first time, skipping first run notification")
    }

    if IsServerRunning(ctx) {
        slog.Info("Detected another instance of ollama running, exiting")
        os.Exit(1)
    } else {
        done, err = SpawnServer(ctx, CLIName)
        if err != nil {
            // TODO - should we retry in a backoff loop?
            // TODO - should we pop up a warning and maybe add a menu item to view application logs?
            slog.Error(fmt.Sprintf("Failed to spawn ollama server %s", err))
            done = make(chan int, 1)
            done <- 1
        }
    }

    StartBackgroundUpdaterChecker(ctx, t.UpdateAvailable)

    t.Run()
    cancel()
    slog.Info("Waiting for ollama server to shutdown...")
    if done != nil {
        <-done
    }
    slog.Info("Ollama app exiting")
}
@@ -1,48 +0,0 @@
package lifecycle

import (
    "fmt"
    "log/slog"
    "os"
    "path/filepath"

    "github.com/ollama/ollama/envconfig"
)

func InitLogging() {
    level := slog.LevelInfo

    if envconfig.Debug {
        level = slog.LevelDebug
    }

    var logFile *os.File
    var err error
    // Detect if we're a GUI app on windows, and if not, send logs to console
    if os.Stderr.Fd() != 0 {
        // Console app detected
        logFile = os.Stderr
        // TODO - write one-line to the app.log file saying we're running in console mode to help avoid confusion
    } else {
        logFile, err = os.OpenFile(AppLogFile, os.O_APPEND|os.O_WRONLY|os.O_CREATE, 0755)
        if err != nil {
            slog.Error(fmt.Sprintf("failed to create server log %v", err))
            return
        }
    }
    handler := slog.NewTextHandler(logFile, &slog.HandlerOptions{
        Level:     level,
        AddSource: true,
        ReplaceAttr: func(_ []string, attr slog.Attr) slog.Attr {
            if attr.Key == slog.SourceKey {
                source := attr.Value.Any().(*slog.Source)
                source.File = filepath.Base(source.File)
            }
            return attr
        },
    })

    slog.SetDefault(slog.New(handler))

    slog.Info("ollama app started")
}
@@ -1,9 +0,0 @@
//go:build !windows

package lifecycle

import "log/slog"

func ShowLogs() {
    slog.Warn("ShowLogs not yet implemented")
}
@@ -1,19 +0,0 @@
package lifecycle

import (
    "fmt"
    "log/slog"
    "os/exec"
    "syscall"
)

func ShowLogs() {
    cmd_path := "c:\\Windows\\system32\\cmd.exe"
    slog.Debug(fmt.Sprintf("viewing logs with start %s", AppDataDir))
    cmd := exec.Command(cmd_path, "/c", "start", AppDataDir)
    cmd.SysProcAttr = &syscall.SysProcAttr{HideWindow: false, CreationFlags: 0x08000000}
    err := cmd.Start()
    if err != nil {
        slog.Error(fmt.Sprintf("Failed to open log dir: %s", err))
    }
}
@@ -1,79 +0,0 @@
package lifecycle

import (
    "errors"
    "fmt"
    "log/slog"
    "os"
    "path/filepath"
    "runtime"
    "strings"
)

var (
    AppName    = "ollama app"
    CLIName    = "ollama"
    AppDir     = "/opt/Ollama"
    AppDataDir = "/opt/Ollama"
    // TODO - should there be a distinct log dir?
    UpdateStageDir = "/tmp"
    AppLogFile     = "/tmp/ollama_app.log"
    ServerLogFile  = "/tmp/ollama.log"
    UpgradeLogFile = "/tmp/ollama_update.log"
    Installer      = "OllamaSetup.exe"
)

func init() {
    if runtime.GOOS == "windows" {
        AppName += ".exe"
        CLIName += ".exe"
        // Logs, configs, downloads go to LOCALAPPDATA
        localAppData := os.Getenv("LOCALAPPDATA")
        AppDataDir = filepath.Join(localAppData, "Ollama")
        UpdateStageDir = filepath.Join(AppDataDir, "updates")
        AppLogFile = filepath.Join(AppDataDir, "app.log")
        ServerLogFile = filepath.Join(AppDataDir, "server.log")
        UpgradeLogFile = filepath.Join(AppDataDir, "upgrade.log")

        // Executables are stored in APPDATA
        AppDir = filepath.Join(localAppData, "Programs", "Ollama")

        // Make sure we have PATH set correctly for any spawned children
        paths := strings.Split(os.Getenv("PATH"), ";")
        // Start with whatever we find in the PATH/LD_LIBRARY_PATH
        found := false
        for _, path := range paths {
            d, err := filepath.Abs(path)
            if err != nil {
                continue
            }
            if strings.EqualFold(AppDir, d) {
                found = true
            }
        }
        if !found {
            paths = append(paths, AppDir)

            pathVal := strings.Join(paths, ";")
            slog.Debug("setting PATH=" + pathVal)
            err := os.Setenv("PATH", pathVal)
            if err != nil {
                slog.Error(fmt.Sprintf("failed to update PATH: %s", err))
            }
        }

        // Make sure our logging dir exists
        _, err := os.Stat(AppDataDir)
        if errors.Is(err, os.ErrNotExist) {
            if err := os.MkdirAll(AppDataDir, 0o755); err != nil {
                slog.Error(fmt.Sprintf("create ollama dir %s: %v", AppDataDir, err))
            }
        }

    } else if runtime.GOOS == "darwin" {
        // TODO
        AppName += ".app"
        // } else if runtime.GOOS == "linux" {
        // TODO
    }
}
@@ -1,181 +0,0 @@
package lifecycle

import (
	"context"
	"errors"
	"fmt"
	"io"
	"log/slog"
	"os"
	"os/exec"
	"path/filepath"
	"time"

	"github.com/ollama/ollama/api"
)

func getCLIFullPath(command string) string {
	cmdPath := ""
	appExe, err := os.Executable()
	if err == nil {
		cmdPath = filepath.Join(filepath.Dir(appExe), command)
		_, err := os.Stat(cmdPath)
		if err == nil {
			return cmdPath
		}
	}
	cmdPath, err = exec.LookPath(command)
	if err == nil {
		_, err := os.Stat(cmdPath)
		if err == nil {
			return cmdPath
		}
	}
	pwd, err := os.Getwd()
	if err == nil {
		cmdPath = filepath.Join(pwd, command)
		_, err = os.Stat(cmdPath)
		if err == nil {
			return cmdPath
		}
	}

	return command
}

func start(ctx context.Context, command string) (*exec.Cmd, error) {
	cmd := getCmd(ctx, getCLIFullPath(command))
	stdout, err := cmd.StdoutPipe()
	if err != nil {
		return nil, fmt.Errorf("failed to spawn server stdout pipe: %w", err)
	}
	stderr, err := cmd.StderrPipe()
	if err != nil {
		return nil, fmt.Errorf("failed to spawn server stderr pipe: %w", err)
	}

	// TODO - rotation
	logFile, err := os.OpenFile(ServerLogFile, os.O_APPEND|os.O_WRONLY|os.O_CREATE, 0755)
	if err != nil {
		return nil, fmt.Errorf("failed to create server log: %w", err)
	}

	logDir := filepath.Dir(ServerLogFile)
	_, err = os.Stat(logDir)
	if err != nil {
		if !errors.Is(err, os.ErrNotExist) {
			return nil, fmt.Errorf("stat ollama server log dir %s: %v", logDir, err)

		}

		if err := os.MkdirAll(logDir, 0o755); err != nil {
			return nil, fmt.Errorf("create ollama server log dir %s: %v", logDir, err)
		}
	}

	go func() {
		defer logFile.Close()
		io.Copy(logFile, stdout) //nolint:errcheck
	}()
	go func() {
		defer logFile.Close()
		io.Copy(logFile, stderr) //nolint:errcheck
	}()

	// Re-wire context done behavior to attempt a graceful shutdown of the server
	cmd.Cancel = func() error {
		if cmd.Process != nil {
			err := terminate(cmd)
			if err != nil {
				slog.Warn("error trying to gracefully terminate server", "err", err)
				return cmd.Process.Kill()
			}

			tick := time.NewTicker(10 * time.Millisecond)
			defer tick.Stop()

			for {
				select {
				case <-tick.C:
					exited, err := isProcessExited(cmd.Process.Pid)
					if err != nil {
						return err
					}

					if exited {
						return nil
					}
				case <-time.After(5 * time.Second):
					slog.Warn("graceful server shutdown timeout, killing", "pid", cmd.Process.Pid)
					return cmd.Process.Kill()
				}
			}
		}
		return nil
	}

	// run the command and wait for it to finish
	if err := cmd.Start(); err != nil {
		return nil, fmt.Errorf("failed to start server %w", err)
	}
	if cmd.Process != nil {
		slog.Info(fmt.Sprintf("started ollama server with pid %d", cmd.Process.Pid))
	}
	slog.Info(fmt.Sprintf("ollama server logs %s", ServerLogFile))

	return cmd, nil
}

func SpawnServer(ctx context.Context, command string) (chan int, error) {
	done := make(chan int)

	go func() {
		// Keep the server running unless we're shutting down the app
		crashCount := 0
		for {
			slog.Info("starting server...")
			cmd, err := start(ctx, command)
			if err != nil {
				crashCount++
				slog.Error(fmt.Sprintf("failed to start server %s", err))
				time.Sleep(500 * time.Millisecond * time.Duration(crashCount))
				continue
			}

			cmd.Wait() //nolint:errcheck
			var code int
			if cmd.ProcessState != nil {
				code = cmd.ProcessState.ExitCode()
			}

			select {
			case <-ctx.Done():
				slog.Info(fmt.Sprintf("server shutdown with exit code %d", code))
				done <- code
				return
			default:
				crashCount++
				slog.Warn(fmt.Sprintf("server crash %d - exit code %d - respawning", crashCount, code))
				time.Sleep(500 * time.Millisecond * time.Duration(crashCount))
				break
			}
		}
	}()

	return done, nil
}

func IsServerRunning(ctx context.Context) bool {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		slog.Info("unable to connect to server")
		return false
	}
	err = client.Heartbeat(ctx)
	if err != nil {
		slog.Debug(fmt.Sprintf("heartbeat from server: %s", err))
		slog.Info("unable to connect to server")
		return false
	}
	return true
}
@@ -1,38 +0,0 @@
//go:build !windows

package lifecycle

import (
	"context"
	"errors"
	"fmt"
	"os"
	"os/exec"
	"syscall"
)

func getCmd(ctx context.Context, cmd string) *exec.Cmd {
	return exec.CommandContext(ctx, cmd, "serve")
}

func terminate(cmd *exec.Cmd) error {
	return cmd.Process.Signal(os.Interrupt)
}

func isProcessExited(pid int) (bool, error) {
	proc, err := os.FindProcess(pid)
	if err != nil {
		return false, fmt.Errorf("failed to find process: %v", err)
	}

	err = proc.Signal(syscall.Signal(0))
	if err != nil {
		if errors.Is(err, os.ErrProcessDone) || errors.Is(err, syscall.ESRCH) {
			return true, nil
		}

		return false, fmt.Errorf("error signaling process: %v", err)
	}

	return false, nil
}
@@ -1,89 +0,0 @@
package lifecycle

import (
	"context"
	"fmt"
	"os/exec"
	"syscall"

	"golang.org/x/sys/windows"
)

func getCmd(ctx context.Context, exePath string) *exec.Cmd {
	cmd := exec.CommandContext(ctx, exePath, "serve")
	cmd.SysProcAttr = &syscall.SysProcAttr{
		HideWindow:    true,
		CreationFlags: windows.CREATE_NEW_PROCESS_GROUP,
	}

	return cmd
}

func terminate(cmd *exec.Cmd) error {
	dll, err := windows.LoadDLL("kernel32.dll")
	if err != nil {
		return err
	}
	defer dll.Release() // nolint: errcheck

	pid := cmd.Process.Pid

	f, err := dll.FindProc("AttachConsole")
	if err != nil {
		return err
	}

	r1, _, err := f.Call(uintptr(pid))
	if r1 == 0 && err != syscall.ERROR_ACCESS_DENIED {
		return err
	}

	f, err = dll.FindProc("SetConsoleCtrlHandler")
	if err != nil {
		return err
	}

	r1, _, err = f.Call(0, 1)
	if r1 == 0 {
		return err
	}

	f, err = dll.FindProc("GenerateConsoleCtrlEvent")
	if err != nil {
		return err
	}

	r1, _, err = f.Call(windows.CTRL_BREAK_EVENT, uintptr(pid))
	if r1 == 0 {
		return err
	}

	r1, _, err = f.Call(windows.CTRL_C_EVENT, uintptr(pid))
	if r1 == 0 {
		return err
	}

	return nil
}

const STILL_ACTIVE = 259

func isProcessExited(pid int) (bool, error) {
	hProcess, err := windows.OpenProcess(windows.PROCESS_QUERY_INFORMATION, false, uint32(pid))
	if err != nil {
		return false, fmt.Errorf("failed to open process: %v", err)
	}
	defer windows.CloseHandle(hProcess) // nolint: errcheck

	var exitCode uint32
	err = windows.GetExitCodeProcess(hProcess, &exitCode)
	if err != nil {
		return false, fmt.Errorf("failed to get exit code: %v", err)
	}

	if exitCode == STILL_ACTIVE {
		return false, nil
	}

	return true, nil
}
@@ -1,228 +0,0 @@
package lifecycle

import (
	"context"
	"crypto/rand"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"log/slog"
	"mime"
	"net/http"
	"net/url"
	"os"
	"path"
	"path/filepath"
	"runtime"
	"strings"
	"time"

	"github.com/ollama/ollama/auth"
	"github.com/ollama/ollama/version"
)

var (
	UpdateCheckURLBase  = "https://ollama.com/api/update"
	UpdateDownloaded    = false
	UpdateCheckInterval = 60 * 60 * time.Second
)

// TODO - maybe move up to the API package?
type UpdateResponse struct {
	UpdateURL     string `json:"url"`
	UpdateVersion string `json:"version"`
}

func IsNewReleaseAvailable(ctx context.Context) (bool, UpdateResponse) {
	var updateResp UpdateResponse

	requestURL, err := url.Parse(UpdateCheckURLBase)
	if err != nil {
		return false, updateResp
	}

	query := requestURL.Query()
	query.Add("os", runtime.GOOS)
	query.Add("arch", runtime.GOARCH)
	query.Add("version", version.Version)
	query.Add("ts", fmt.Sprintf("%d", time.Now().Unix()))

	nonce, err := auth.NewNonce(rand.Reader, 16)
	if err != nil {
		return false, updateResp
	}

	query.Add("nonce", nonce)
	requestURL.RawQuery = query.Encode()

	data := []byte(fmt.Sprintf("%s,%s", http.MethodGet, requestURL.RequestURI()))
	signature, err := auth.Sign(ctx, data)
	if err != nil {
		return false, updateResp
	}

	req, err := http.NewRequestWithContext(ctx, http.MethodGet, requestURL.String(), nil)
	if err != nil {
		slog.Warn(fmt.Sprintf("failed to check for update: %s", err))
		return false, updateResp
	}
	req.Header.Set("Authorization", signature)
	req.Header.Set("User-Agent", fmt.Sprintf("ollama/%s (%s %s) Go/%s", version.Version, runtime.GOARCH, runtime.GOOS, runtime.Version()))

	slog.Debug("checking for available update", "requestURL", requestURL)
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		slog.Warn(fmt.Sprintf("failed to check for update: %s", err))
		return false, updateResp
	}
	defer resp.Body.Close()

	if resp.StatusCode == 204 {
		slog.Debug("check update response 204 (current version is up to date)")
		return false, updateResp
	}
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		slog.Warn(fmt.Sprintf("failed to read body response: %s", err))
	}

	if resp.StatusCode != 200 {
		slog.Info(fmt.Sprintf("check update error %d - %.96s", resp.StatusCode, string(body)))
		return false, updateResp
	}
	err = json.Unmarshal(body, &updateResp)
	if err != nil {
		slog.Warn(fmt.Sprintf("malformed response checking for update: %s", err))
		return false, updateResp
	}
	// Extract the version string from the URL in the github release artifact path
	updateResp.UpdateVersion = path.Base(path.Dir(updateResp.UpdateURL))

	slog.Info("New update available at " + updateResp.UpdateURL)
	return true, updateResp
}

func DownloadNewRelease(ctx context.Context, updateResp UpdateResponse) error {
	// Do a head first to check etag info
	req, err := http.NewRequestWithContext(ctx, http.MethodHead, updateResp.UpdateURL, nil)
	if err != nil {
		return err
	}

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return fmt.Errorf("error checking update: %w", err)
	}
	if resp.StatusCode != 200 {
		return fmt.Errorf("unexpected status attempting to download update %d", resp.StatusCode)
	}
	resp.Body.Close()
	etag := strings.Trim(resp.Header.Get("etag"), "\"")
	if etag == "" {
		slog.Debug("no etag detected, falling back to filename based dedup")
		etag = "_"
	}
	filename := Installer
	_, params, err := mime.ParseMediaType(resp.Header.Get("content-disposition"))
	if err == nil {
		filename = params["filename"]
	}

	stageFilename := filepath.Join(UpdateStageDir, etag, filename)

	// Check to see if we already have it downloaded
	_, err = os.Stat(stageFilename)
	if err == nil {
		slog.Info("update already downloaded")
		return nil
	}

	cleanupOldDownloads()

	req.Method = http.MethodGet
	resp, err = http.DefaultClient.Do(req)
	if err != nil {
		return fmt.Errorf("error checking update: %w", err)
	}
	defer resp.Body.Close()
	etag = strings.Trim(resp.Header.Get("etag"), "\"")
	if etag == "" {
		slog.Debug("no etag detected, falling back to filename based dedup") // TODO probably can get rid of this redundant log
		etag = "_"
	}

	stageFilename = filepath.Join(UpdateStageDir, etag, filename)

	_, err = os.Stat(filepath.Dir(stageFilename))
	if errors.Is(err, os.ErrNotExist) {
		if err := os.MkdirAll(filepath.Dir(stageFilename), 0o755); err != nil {
			return fmt.Errorf("create ollama dir %s: %v", filepath.Dir(stageFilename), err)
		}
	}

	payload, err := io.ReadAll(resp.Body)
	if err != nil {
		return fmt.Errorf("failed to read body response: %w", err)
	}
	fp, err := os.OpenFile(stageFilename, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0o755)
	if err != nil {
		return fmt.Errorf("write payload %s: %w", stageFilename, err)
	}
	defer fp.Close()
	if n, err := fp.Write(payload); err != nil || n != len(payload) {
		return fmt.Errorf("write payload %s: %d vs %d -- %w", stageFilename, n, len(payload), err)
	}
	slog.Info("new update downloaded " + stageFilename)

	UpdateDownloaded = true
	return nil
}

func cleanupOldDownloads() {
	files, err := os.ReadDir(UpdateStageDir)
	if err != nil && errors.Is(err, os.ErrNotExist) {
		// Expected behavior on first run
		return
	} else if err != nil {
		slog.Warn(fmt.Sprintf("failed to list stage dir: %s", err))
		return
	}
	for _, file := range files {
		fullname := filepath.Join(UpdateStageDir, file.Name())
		slog.Debug("cleaning up old download: " + fullname)
		err = os.RemoveAll(fullname)
		if err != nil {
			slog.Warn(fmt.Sprintf("failed to cleanup stale update download %s", err))
		}
	}
}

func StartBackgroundUpdaterChecker(ctx context.Context, cb func(string) error) {
	go func() {
		// Don't blast an update message immediately after startup
		// time.Sleep(30 * time.Second)
		time.Sleep(3 * time.Second)

		for {
			available, resp := IsNewReleaseAvailable(ctx)
			if available {
				err := DownloadNewRelease(ctx, resp)
				if err != nil {
					slog.Error(fmt.Sprintf("failed to download new release: %s", err))
				}
				err = cb(resp.UpdateVersion)
				if err != nil {
					slog.Warn(fmt.Sprintf("failed to register update available with tray: %s", err))
				}
			}
			select {
			case <-ctx.Done():
				slog.Debug("stopping background update checker")
				return
			default:
				time.Sleep(UpdateCheckInterval)
			}
		}
	}()
}
@@ -1,12 +0,0 @@
//go:build !windows

package lifecycle

import (
	"context"
	"fmt"
)

func DoUpgrade(cancel context.CancelFunc, done chan int) error {
	return fmt.Errorf("DoUpgrade not yet implemented")
}
@@ -1,77 +0,0 @@
package lifecycle

import (
	"context"
	"fmt"
	"log/slog"
	"os"
	"os/exec"
	"path/filepath"
)

func DoUpgrade(cancel context.CancelFunc, done chan int) error {
	files, err := filepath.Glob(filepath.Join(UpdateStageDir, "*", "*.exe")) // TODO generalize for multiplatform
	if err != nil {
		return fmt.Errorf("failed to lookup downloads: %s", err)
	}
	if len(files) == 0 {
		return fmt.Errorf("no update downloads found")
	} else if len(files) > 1 {
		// Shouldn't happen
		slog.Warn(fmt.Sprintf("multiple downloads found, using first one %v", files))
	}
	installerExe := files[0]

	slog.Info("starting upgrade with " + installerExe)
	slog.Info("upgrade log file " + UpgradeLogFile)

	// When running in debug mode, we'll be "verbose" and let the installer pop up and prompt
	installArgs := []string{
		"/CLOSEAPPLICATIONS",                    // Quit the tray app if it's still running
		"/LOG=" + filepath.Base(UpgradeLogFile), // Only relative seems reliable, so set pwd
		"/FORCECLOSEAPPLICATIONS",               // Force close the tray app - might be needed
	}
	// make the upgrade as quiet as possible (no GUI, no prompts)
	installArgs = append(installArgs,
		"/SP", // Skip the "This will install... Do you wish to continue" prompt
		"/SUPPRESSMSGBOXES",
		"/SILENT",
		"/VERYSILENT",
	)

	// Safeguard in case we have requests in flight that need to drain...
	slog.Info("Waiting for server to shutdown")
	cancel()
	if done != nil {
		<-done
	} else {
		// Shouldn't happen
		slog.Warn("done chan was nil, not actually waiting")
	}

	slog.Debug(fmt.Sprintf("starting installer: %s %v", installerExe, installArgs))
	os.Chdir(filepath.Dir(UpgradeLogFile)) //nolint:errcheck
	cmd := exec.Command(installerExe, installArgs...)

	if err := cmd.Start(); err != nil {
		return fmt.Errorf("unable to start ollama app %w", err)
	}

	if cmd.Process != nil {
		err = cmd.Process.Release()
		if err != nil {
			slog.Error(fmt.Sprintf("failed to release server process: %s", err))
		}
	} else {
		// TODO - some details about why it didn't start, or is this a pedantic error case?
		return fmt.Errorf("installer process did not start")
	}

	// TODO should we linger for a moment and check to make sure it's actually running by checking the pid?

	slog.Info("Installer started in background, exiting")

	os.Exit(0)
	// Not reached
	return nil
}
12 app/main.go
@@ -1,12 +0,0 @@
package main

// Compile with the following to get rid of the cmd pop up on windows
// go build -ldflags="-H windowsgui" .

import (
	"github.com/ollama/ollama/app/lifecycle"
)

func main() {
	lifecycle.Run()
}
156 app/ollama.iss
@@ -1,156 +0,0 @@
; Inno Setup Installer for Ollama
;
; To build the installer use the build script invoked from the top of the source tree
;
; powershell -ExecutionPolicy Bypass -File .\scripts\build_windows.ps


#define MyAppName "Ollama"
#if GetEnv("PKG_VERSION") != ""
  #define MyAppVersion GetEnv("PKG_VERSION")
#else
  #define MyAppVersion "0.0.0"
#endif
#define MyAppPublisher "Ollama"
#define MyAppURL "https://ollama.com/"
#define MyAppExeName "ollama app.exe"
#define MyIcon ".\assets\app.ico"

[Setup]
; NOTE: The value of AppId uniquely identifies this application. Do not use the same AppId value in installers for other applications.
; (To generate a new GUID, click Tools | Generate GUID inside the IDE.)
AppId={{44E83376-CE68-45EB-8FC1-393500EB558C}
AppName={#MyAppName}
AppVersion={#MyAppVersion}
VersionInfoVersion={#MyAppVersion}
;AppVerName={#MyAppName} {#MyAppVersion}
AppPublisher={#MyAppPublisher}
AppPublisherURL={#MyAppURL}
AppSupportURL={#MyAppURL}
AppUpdatesURL={#MyAppURL}
ArchitecturesAllowed=x64 arm64
ArchitecturesInstallIn64BitMode=x64 arm64
DefaultDirName={localappdata}\Programs\{#MyAppName}
DefaultGroupName={#MyAppName}
DisableProgramGroupPage=yes
PrivilegesRequired=lowest
OutputBaseFilename="OllamaSetup"
SetupIconFile={#MyIcon}
UninstallDisplayIcon={uninstallexe}
Compression=lzma2
SolidCompression=no
WizardStyle=modern
ChangesEnvironment=yes
OutputDir=..\dist\

; Disable logging once everything's battle tested
; Filename will be %TEMP%\Setup Log*.txt
SetupLogging=yes
CloseApplications=yes
RestartApplications=no

; https://jrsoftware.org/ishelp/index.php?topic=setup_wizardimagefile
WizardSmallImageFile=.\assets\setup.bmp

; TODO verify actual min windows version...
; OG Win 10
MinVersion=10.0.10240

; First release that supports WinRT UI Composition for win32 apps
; MinVersion=10.0.17134
; First release with XAML Islands - possible UI path forward
; MinVersion=10.0.18362

; quiet...
DisableDirPage=yes
DisableFinishedPage=yes
DisableReadyMemo=yes
DisableReadyPage=yes
DisableStartupPrompt=yes
DisableWelcomePage=yes

; TODO - percentage can't be set less than 100, so how to make it shorter?
; WizardSizePercent=100,80

#if GetEnv("KEY_CONTAINER")
SignTool=MySignTool
SignedUninstaller=yes
#endif

SetupMutex=OllamaSetupMutex

[Languages]
Name: "english"; MessagesFile: "compiler:Default.isl"

[LangOptions]
DialogFontSize=12

[Files]
Source: ".\app.exe"; DestDir: "{app}"; DestName: "{#MyAppExeName}" ; Flags: ignoreversion 64bit
Source: "..\ollama.exe"; DestDir: "{app}"; Flags: ignoreversion 64bit
Source: "..\dist\windows-{#ARCH}\*.dll"; DestDir: "{app}"; Flags: ignoreversion 64bit
Source: "..\dist\windows-{#ARCH}\ollama_runners\*"; DestDir: "{app}\ollama_runners"; Flags: ignoreversion 64bit recursesubdirs
Source: "..\dist\ollama_welcome.ps1"; DestDir: "{app}"; Flags: ignoreversion
Source: ".\assets\app.ico"; DestDir: "{app}"; Flags: ignoreversion
#if DirExists("..\dist\windows-amd64\rocm")
Source: "..\dist\windows-amd64\rocm\*"; DestDir: "{app}\rocm\"; Flags: ignoreversion recursesubdirs
#endif


[Icons]
Name: "{group}\{#MyAppName}"; Filename: "{app}\{#MyAppExeName}"; IconFilename: "{app}\app.ico"
Name: "{userstartup}\{#MyAppName}"; Filename: "{app}\{#MyAppExeName}"; IconFilename: "{app}\app.ico"
Name: "{userprograms}\{#MyAppName}"; Filename: "{app}\{#MyAppExeName}"; IconFilename: "{app}\app.ico"

[Run]
Filename: "{cmd}"; Parameters: "/C set PATH={app};%PATH% & ""{app}\{#MyAppExeName}"""; Flags: postinstall nowait runhidden

[UninstallRun]
; Filename: "{cmd}"; Parameters: "/C ""taskkill /im ''{#MyAppExeName}'' /f /t"; Flags: runhidden
; Filename: "{cmd}"; Parameters: "/C ""taskkill /im ollama.exe /f /t"; Flags: runhidden
Filename: "taskkill"; Parameters: "/im ""{#MyAppExeName}"" /f /t"; Flags: runhidden
Filename: "taskkill"; Parameters: "/im ""ollama.exe"" /f /t"; Flags: runhidden
; HACK! need to give the server and app enough time to exit
; TODO - convert this to a Pascal code script so it waits until they're no longer running, then completes
Filename: "{cmd}"; Parameters: "/c timeout 5"; Flags: runhidden

[UninstallDelete]
Type: filesandordirs; Name: "{%TEMP}\ollama*"
Type: filesandordirs; Name: "{%LOCALAPPDATA}\Ollama"
Type: filesandordirs; Name: "{%LOCALAPPDATA}\Programs\Ollama"
Type: filesandordirs; Name: "{%USERPROFILE}\.ollama\models"
Type: filesandordirs; Name: "{%USERPROFILE}\.ollama\history"
; NOTE: if the user has a custom OLLAMA_MODELS it will be preserved

[Messages]
WizardReady=Ollama Windows Preview
ReadyLabel1=%nLet's get you up and running with your own large language models.
SetupAppRunningError=Another Ollama installer is running.%n%nPlease cancel or finish the other installer, then click OK to continue with this install, or Cancel to exit.


;FinishedHeadingLabel=Run your first model
;FinishedLabel=%nRun this command in a PowerShell or cmd terminal.%n%n%n ollama run llama3
;ClickFinish=%n

[Registry]
Root: HKCU; Subkey: "Environment"; \
    ValueType: expandsz; ValueName: "Path"; ValueData: "{olddata};{app}"; \
    Check: NeedsAddPath('{app}')

[Code]

function NeedsAddPath(Param: string): boolean;
var
  OrigPath: string;
begin
  if not RegQueryStringValue(HKEY_CURRENT_USER,
    'Environment',
    'Path', OrigPath)
  then begin
    Result := True;
    exit;
  end;
  { look for the path with leading and trailing semicolon }
  { Pos() returns 0 if not found }
  Result := Pos(';' + ExpandConstant(Param) + ';', ';' + OrigPath + ';') = 0;
end;
@@ -1,29 +0,0 @@
#include <winver.h>

VS_VERSION_INFO VERSIONINFO
    FILEFLAGSMASK 0x3fL
#ifdef _DEBUG
    FILEFLAGS 0x1L
#else
    FILEFLAGS 0x0L
#endif
    FILEOS 0x40004L
    FILETYPE 0x1L
    FILESUBTYPE 0x0L
BEGIN
    BLOCK "StringFileInfo"
    BEGIN
        BLOCK "040904b0"
        BEGIN
            VALUE "FileDescription", "Ollama"
            VALUE "InternalName", "Ollama"
            VALUE "OriginalFilename", "ollama app.exe"
            VALUE "ProductName", "Ollama"
        END
    END

    BLOCK "VarFileInfo"
    BEGIN
        VALUE "Translation", 0x409, 1200
    END
END
@@ -1,8 +0,0 @@
# TODO - consider ANSI colors and maybe ASCII art...
write-host ""
write-host "Welcome to Ollama!"
write-host ""
write-host "Run your first model:"
write-host ""
write-host "`tollama run llama2"
write-host ""
@@ -19,7 +19,7 @@ export default function () {
  const [step, setStep] = useState<Step>(Step.WELCOME)
  const [commandCopied, setCommandCopied] = useState<boolean>(false)

  const command = 'ollama run llama3'
  const command = 'ollama run llama2'

  return (
    <div className='drag'>
@@ -162,7 +162,7 @@ app.on('before-quit', () => {
  }
})

const updateURL = `https://ollama.com/api/update?os=${process.platform}&arch=${
const updateURL = `https://ollama.ai/api/update?os=${process.platform}&arch=${
  process.arch
}&version=${app.getVersion()}&id=${id()}`
Binary image changed (Before: 17 KiB, After: 17 KiB)
@@ -1,98 +0,0 @@
package store

import (
	"encoding/json"
	"errors"
	"fmt"
	"log/slog"
	"os"
	"path/filepath"
	"sync"

	"github.com/google/uuid"
)

type Store struct {
	ID           string `json:"id"`
	FirstTimeRun bool   `json:"first-time-run"`
}

var (
	lock  sync.Mutex
	store Store
)

func GetID() string {
	lock.Lock()
	defer lock.Unlock()
	if store.ID == "" {
		initStore()
	}
	return store.ID

}

func GetFirstTimeRun() bool {
	lock.Lock()
	defer lock.Unlock()
	if store.ID == "" {
		initStore()
	}
	return store.FirstTimeRun
}

func SetFirstTimeRun(val bool) {
	lock.Lock()
	defer lock.Unlock()
	if store.FirstTimeRun == val {
		return
	}
	store.FirstTimeRun = val
	writeStore(getStorePath())
}

// lock must be held
func initStore() {
	storeFile, err := os.Open(getStorePath())
	if err == nil {
		defer storeFile.Close()
		err = json.NewDecoder(storeFile).Decode(&store)
		if err == nil {
			slog.Debug(fmt.Sprintf("loaded existing store %s - ID: %s", getStorePath(), store.ID))
			return
		}
	} else if !errors.Is(err, os.ErrNotExist) {
		slog.Debug(fmt.Sprintf("unexpected error searching for store: %s", err))
	}
	slog.Debug("initializing new store")
	store.ID = uuid.New().String()
	writeStore(getStorePath())
}

func writeStore(storeFilename string) {
	ollamaDir := filepath.Dir(storeFilename)
	_, err := os.Stat(ollamaDir)
	if errors.Is(err, os.ErrNotExist) {
		if err := os.MkdirAll(ollamaDir, 0o755); err != nil {
			slog.Error(fmt.Sprintf("create ollama dir %s: %v", ollamaDir, err))
			return
		}
	}
	payload, err := json.Marshal(store)
	if err != nil {
		slog.Error(fmt.Sprintf("failed to marshal store: %s", err))
		return
	}
	fp, err := os.OpenFile(storeFilename, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0o755)
	if err != nil {
		slog.Error(fmt.Sprintf("write store payload %s: %v", storeFilename, err))
		return
	}
	defer fp.Close()
	if n, err := fp.Write(payload); err != nil || n != len(payload) {
		slog.Error(fmt.Sprintf("write store payload %s: %d vs %d -- %v", storeFilename, n, len(payload), err))
		return
	}
	slog.Debug("Store contents: " + string(payload))
	slog.Info(fmt.Sprintf("wrote store: %s", storeFilename))
}
@@ -1,13 +0,0 @@
package store

import (
	"os"
	"path/filepath"
)

func getStorePath() string {
	// TODO - system wide location?

	home := os.Getenv("HOME")
	return filepath.Join(home, "Library", "Application Support", "Ollama", "config.json")
}
@@ -1,16 +0,0 @@
package store

import (
	"os"
	"path/filepath"
)

func getStorePath() string {
	if os.Geteuid() == 0 {
		// TODO where should we store this on linux for system-wide operation?
		return "/etc/ollama/config.json"
	}

	home := os.Getenv("HOME")
	return filepath.Join(home, ".ollama", "config.json")
}
@@ -1,11 +0,0 @@
package store

import (
	"os"
	"path/filepath"
)

func getStorePath() string {
	localAppData := os.Getenv("LOCALAPPDATA")
	return filepath.Join(localAppData, "Ollama", "config.json")
}
@@ -1,24 +0,0 @@
package commontray

var (
	Title   = "Ollama"
	ToolTip = "Ollama"

	UpdateIconName = "tray_upgrade"
	IconName       = "tray"
)

type Callbacks struct {
	Quit       chan struct{}
	Update     chan struct{}
	DoFirstUse chan struct{}
	ShowLogs   chan struct{}
}

type OllamaTray interface {
	GetCallbacks() Callbacks
	Run()
	UpdateAvailable(ver string) error
	DisplayFirstUseNotification() error
	Quit()
}
@@ -1,28 +0,0 @@
package tray

import (
	"fmt"
	"runtime"

	"github.com/ollama/ollama/app/assets"
	"github.com/ollama/ollama/app/tray/commontray"
)

func NewTray() (commontray.OllamaTray, error) {
	extension := ".png"
	if runtime.GOOS == "windows" {
		extension = ".ico"
	}
	iconName := commontray.UpdateIconName + extension
	updateIcon, err := assets.GetIcon(iconName)
	if err != nil {
		return nil, fmt.Errorf("failed to load icon %s: %w", iconName, err)
	}
	iconName = commontray.IconName + extension
	icon, err := assets.GetIcon(iconName)
	if err != nil {
		return nil, fmt.Errorf("failed to load icon %s: %w", iconName, err)
	}

	return InitPlatformTray(icon, updateIcon)
}
@@ -1,13 +0,0 @@
//go:build !windows

package tray

import (
	"fmt"

	"github.com/ollama/ollama/app/tray/commontray"
)

func InitPlatformTray(icon, updateIcon []byte) (commontray.OllamaTray, error) {
	return nil, fmt.Errorf("NOT IMPLEMENTED YET")
}
@@ -1,10 +0,0 @@
package tray

import (
	"github.com/ollama/ollama/app/tray/commontray"
	"github.com/ollama/ollama/app/tray/wintray"
)

func InitPlatformTray(icon, updateIcon []byte) (commontray.OllamaTray, error) {
	return wintray.InitTray(icon, updateIcon)
}
@@ -1,184 +0,0 @@
//go:build windows

package wintray

import (
	"fmt"
	"log/slog"
	"sync"
	"unsafe"

	"golang.org/x/sys/windows"
)

var (
	quitOnce sync.Once
)

func (t *winTray) Run() {
	nativeLoop()
}

func nativeLoop() {
	// Main message pump.
	slog.Debug("starting event handling loop")
	m := &struct {
		WindowHandle windows.Handle
		Message      uint32
		Wparam       uintptr
		Lparam       uintptr
		Time         uint32
		Pt           point
		LPrivate     uint32
	}{}
	for {
		ret, _, err := pGetMessage.Call(uintptr(unsafe.Pointer(m)), 0, 0, 0)

		// If the function retrieves a message other than WM_QUIT, the return value is nonzero.
		// If the function retrieves the WM_QUIT message, the return value is zero.
		// If there is an error, the return value is -1
		// https://msdn.microsoft.com/en-us/library/windows/desktop/ms644936(v=vs.85).aspx
		switch int32(ret) {
		case -1:
			slog.Error(fmt.Sprintf("get message failure: %v", err))
			return
		case 0:
			return
		default:
			pTranslateMessage.Call(uintptr(unsafe.Pointer(m))) //nolint:errcheck
			pDispatchMessage.Call(uintptr(unsafe.Pointer(m)))  //nolint:errcheck

		}
	}
}

// WindowProc callback function that processes messages sent to a window.
// https://msdn.microsoft.com/en-us/library/windows/desktop/ms633573(v=vs.85).aspx
func (t *winTray) wndProc(hWnd windows.Handle, message uint32, wParam, lParam uintptr) (lResult uintptr) {
	const (
		WM_RBUTTONUP   = 0x0205
		WM_LBUTTONUP   = 0x0202
		WM_COMMAND     = 0x0111
		WM_ENDSESSION  = 0x0016
		WM_CLOSE       = 0x0010
		WM_DESTROY     = 0x0002
		WM_MOUSEMOVE   = 0x0200
		WM_LBUTTONDOWN = 0x0201
	)
	switch message {
	case WM_COMMAND:
		menuItemId := int32(wParam)
		// https://docs.microsoft.com/en-us/windows/win32/menurc/wm-command#menus
		switch menuItemId {
		case quitMenuID:
			select {
			case t.callbacks.Quit <- struct{}{}:
			// should not happen but in case not listening
			default:
				slog.Error("no listener on Quit")
			}
		case updateMenuID:
			select {
			case t.callbacks.Update <- struct{}{}:
			// should not happen but in case not listening
			default:
				slog.Error("no listener on Update")
			}
		case diagLogsMenuID:
			select {
			case t.callbacks.ShowLogs <- struct{}{}:
			// should not happen but in case not listening
			default:
				slog.Error("no listener on ShowLogs")
			}
		default:
			slog.Debug(fmt.Sprintf("Unexpected menu item id: %d", menuItemId))
		}
	case WM_CLOSE:
		boolRet, _, err := pDestroyWindow.Call(uintptr(t.window))
		if boolRet == 0 {
			slog.Error(fmt.Sprintf("failed to destroy window: %s", err))
		}
		err = t.wcex.unregister()
		if err != nil {
			slog.Error(fmt.Sprintf("failed to unregister window %s", err))
		}
	case WM_DESTROY:
		// same as WM_ENDSESSION, but throws 0 exit code after all
		defer pPostQuitMessage.Call(uintptr(int32(0))) //nolint:errcheck
		fallthrough
	case WM_ENDSESSION:
		t.muNID.Lock()
		if t.nid != nil {
			err := t.nid.delete()
			if err != nil {
				slog.Error(fmt.Sprintf("failed to delete nid: %s", err))
			}
		}
		t.muNID.Unlock()
	case t.wmSystrayMessage:
		switch lParam {
		case WM_MOUSEMOVE, WM_LBUTTONDOWN:
			// Ignore these...
		case WM_RBUTTONUP, WM_LBUTTONUP:
			err := t.showMenu()
			if err != nil {
				slog.Error(fmt.Sprintf("failed to show menu: %s", err))
			}
		case 0x405: // TODO - how is this magic value derived for the notification left click
			if t.pendingUpdate {
				select {
				case t.callbacks.Update <- struct{}{}:
				// should not happen but in case not listening
				default:
					slog.Error("no listener on Update")
				}
			} else {
				select {
				case t.callbacks.DoFirstUse <- struct{}{}:
				// should not happen but in case not listening
				default:
					slog.Error("no listener on DoFirstUse")
				}
			}
		case 0x404: // Middle click or close notification
			// slog.Debug("doing nothing on close of first time notification")
		default:
			// 0x402 also seems common - what is it?
			slog.Debug(fmt.Sprintf("unmanaged app message, lParam: 0x%x", lParam))
		}
	case t.wmTaskbarCreated: // on explorer.exe restarts
		t.muNID.Lock()
		err := t.nid.add()
		if err != nil {
			slog.Error(fmt.Sprintf("failed to refresh the taskbar on explorer restart: %s", err))
		}
		t.muNID.Unlock()
	default:
		// Calls the default window procedure to provide default processing for any window messages that an application does not process.
		// https://msdn.microsoft.com/en-us/library/windows/desktop/ms633572(v=vs.85).aspx
		lResult, _, _ = pDefWindowProc.Call(
			uintptr(hWnd),
			uintptr(message),
			uintptr(wParam),
			uintptr(lParam),
		)
	}
	return
}

func (t *winTray) Quit() {
	quitOnce.Do(quit)
}

func quit() {
	boolRet, _, err := pPostMessage.Call(
		uintptr(wt.window),
		WM_CLOSE,
		0,
		0,
	)
	if boolRet == 0 {
		slog.Error(fmt.Sprintf("failed to post close message on shutdown %s", err))
	}
}
@@ -1,71 +0,0 @@
//go:build windows

package wintray

import (
	"fmt"
	"log/slog"
	"unsafe"

	"golang.org/x/sys/windows"
)

const (
	updatAvailableMenuID = 1
	updateMenuID         = updatAvailableMenuID + 1
	separatorMenuID      = updateMenuID + 1
	diagLogsMenuID       = separatorMenuID + 1
	diagSeparatorMenuID  = diagLogsMenuID + 1
	quitMenuID           = diagSeparatorMenuID + 1
)

func (t *winTray) initMenus() error {
	if err := t.addOrUpdateMenuItem(diagLogsMenuID, 0, diagLogsMenuTitle, false); err != nil {
		return fmt.Errorf("unable to create menu entries %w\n", err)
	}
	if err := t.addSeparatorMenuItem(diagSeparatorMenuID, 0); err != nil {
		return fmt.Errorf("unable to create menu entries %w", err)
	}
	if err := t.addOrUpdateMenuItem(quitMenuID, 0, quitMenuTitle, false); err != nil {
		return fmt.Errorf("unable to create menu entries %w\n", err)
	}
	return nil
}

func (t *winTray) UpdateAvailable(ver string) error {
	if !t.updateNotified {
		slog.Debug("updating menu and sending notification for new update")
		if err := t.addOrUpdateMenuItem(updatAvailableMenuID, 0, updateAvailableMenuTitle, true); err != nil {
			return fmt.Errorf("unable to create menu entries %w", err)
		}
		if err := t.addOrUpdateMenuItem(updateMenuID, 0, updateMenutTitle, false); err != nil {
			return fmt.Errorf("unable to create menu entries %w", err)
		}
		if err := t.addSeparatorMenuItem(separatorMenuID, 0); err != nil {
			return fmt.Errorf("unable to create menu entries %w", err)
		}
		iconFilePath, err := iconBytesToFilePath(wt.updateIcon)
		if err != nil {
			return fmt.Errorf("unable to write icon data to temp file: %w", err)
		}
		if err := wt.setIcon(iconFilePath); err != nil {
			return fmt.Errorf("unable to set icon: %w", err)
		}
		t.updateNotified = true

		t.pendingUpdate = true
		// Now pop up the notification
		t.muNID.Lock()
		defer t.muNID.Unlock()
		copy(t.nid.InfoTitle[:], windows.StringToUTF16(updateTitle))
		copy(t.nid.Info[:], windows.StringToUTF16(fmt.Sprintf(updateMessage, ver)))
		t.nid.Flags |= NIF_INFO
		t.nid.Timeout = 10
		t.nid.Size = uint32(unsafe.Sizeof(*wt.nid))
		err = t.nid.modify()
		if err != nil {
			return err
		}
	}
	return nil
}
@@ -1,15 +0,0 @@
//go:build windows

package wintray

const (
	firstTimeTitle   = "Ollama is running"
	firstTimeMessage = "Click here to get started"
	updateTitle      = "Update available"
	updateMessage    = "Ollama version %s is ready to install"

	quitMenuTitle            = "Quit Ollama"
	updateAvailableMenuTitle = "An update is available"
	updateMenutTitle         = "Restart to update"
	diagLogsMenuTitle        = "View logs"
)
@@ -1,66 +0,0 @@
//go:build windows

package wintray

import (
	"unsafe"

	"golang.org/x/sys/windows"
)

// Contains information that the system needs to display notifications in the notification area.
// Used by Shell_NotifyIcon.
// https://msdn.microsoft.com/en-us/library/windows/desktop/bb773352(v=vs.85).aspx
// https://msdn.microsoft.com/en-us/library/windows/desktop/bb762159
type notifyIconData struct {
	Size                       uint32
	Wnd                        windows.Handle
	ID, Flags, CallbackMessage uint32
	Icon                       windows.Handle
	Tip                        [128]uint16
	State, StateMask           uint32
	Info                       [256]uint16
	// Timeout, Version uint32
	Timeout uint32

	InfoTitle   [64]uint16
	InfoFlags   uint32
	GuidItem    windows.GUID
	BalloonIcon windows.Handle
}

func (nid *notifyIconData) add() error {
	const NIM_ADD = 0x00000000
	res, _, err := pShellNotifyIcon.Call(
		uintptr(NIM_ADD),
		uintptr(unsafe.Pointer(nid)),
	)
	if res == 0 {
		return err
	}
	return nil
}

func (nid *notifyIconData) modify() error {
	const NIM_MODIFY = 0x00000001
	res, _, err := pShellNotifyIcon.Call(
		uintptr(NIM_MODIFY),
		uintptr(unsafe.Pointer(nid)),
	)
	if res == 0 {
		return err
	}
	return nil
}

func (nid *notifyIconData) delete() error {
	const NIM_DELETE = 0x00000002
	res, _, err := pShellNotifyIcon.Call(
		uintptr(NIM_DELETE),
		uintptr(unsafe.Pointer(nid)),
	)
	if res == 0 {
		return err
	}
	return nil
}
@@ -1,485 +0,0 @@
|
||||
//go:build windows
|
||||
|
||||
package wintray
|
||||
|
||||
import (
|
||||
"crypto/md5"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"sync"
|
||||
"unsafe"
|
||||
|
||||
"github.com/ollama/ollama/app/tray/commontray"
|
||||
"golang.org/x/sys/windows"
|
||||
)
|
||||
|
||||
// Helpful sources: https://github.com/golang/exp/blob/master/shiny/driver/internal/win32
|
||||
|
||||
// Contains information about loaded resources
|
||||
type winTray struct {
|
||||
instance,
|
||||
icon,
|
||||
cursor,
|
||||
window windows.Handle
|
||||
|
||||
loadedImages map[string]windows.Handle
|
||||
muLoadedImages sync.RWMutex
|
||||
|
||||
// menus keeps track of the submenus keyed by the menu item ID, plus 0
|
||||
// which corresponds to the main popup menu.
|
||||
menus map[uint32]windows.Handle
|
||||
muMenus sync.RWMutex
|
||||
menuOf map[uint32]windows.Handle
|
||||
muMenuOf sync.RWMutex
|
||||
// menuItemIcons maintains the bitmap of each menu item (if applies). It's
|
||||
// needed to show the icon correctly when showing a previously hidden menu
|
||||
// item again.
|
||||
// menuItemIcons map[uint32]windows.Handle
|
||||
// muMenuItemIcons sync.RWMutex
|
||||
visibleItems map[uint32][]uint32
|
||||
muVisibleItems sync.RWMutex
|
||||
|
||||
nid *notifyIconData
|
||||
muNID sync.RWMutex
|
||||
wcex *wndClassEx
|
||||
|
||||
wmSystrayMessage,
|
||||
wmTaskbarCreated uint32
|
||||
|
||||
pendingUpdate bool
|
||||
updateNotified bool // Only pop up the notification once - TODO consider daily nag?
|
||||
// Callbacks
|
||||
callbacks commontray.Callbacks
|
||||
normalIcon []byte
|
||||
updateIcon []byte
|
||||
}
|
||||
|
||||
var wt winTray
|
||||
|
||||
func (t *winTray) GetCallbacks() commontray.Callbacks {
|
||||
return t.callbacks
|
||||
}
|
||||
|
||||
func InitTray(icon, updateIcon []byte) (*winTray, error) {
|
||||
wt.callbacks.Quit = make(chan struct{})
|
||||
wt.callbacks.Update = make(chan struct{})
|
||||
wt.callbacks.ShowLogs = make(chan struct{})
|
||||
wt.callbacks.DoFirstUse = make(chan struct{})
|
||||
wt.normalIcon = icon
|
||||
wt.updateIcon = updateIcon
|
||||
if err := wt.initInstance(); err != nil {
|
||||
return nil, fmt.Errorf("Unable to init instance: %w\n", err)
|
||||
}
|
||||
|
||||
if err := wt.createMenu(); err != nil {
|
||||
return nil, fmt.Errorf("Unable to create menu: %w\n", err)
|
||||
}
|
||||
|
||||
iconFilePath, err := iconBytesToFilePath(wt.normalIcon)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Unable to write icon data to temp file: %w", err)
|
||||
}
|
||||
if err := wt.setIcon(iconFilePath); err != nil {
|
||||
return nil, fmt.Errorf("Unable to set icon: %w", err)
|
||||
}
|
||||
|
||||
return &wt, wt.initMenus()
|
||||
}
|
||||
|
||||
func (t *winTray) initInstance() error {
|
||||
const (
|
||||
className = "OllamaClass"
|
||||
windowName = ""
|
||||
)
|
||||
|
||||
t.wmSystrayMessage = WM_USER + 1
|
||||
t.visibleItems = make(map[uint32][]uint32)
|
||||
t.menus = make(map[uint32]windows.Handle)
|
||||
t.menuOf = make(map[uint32]windows.Handle)
|
||||
|
||||
t.loadedImages = make(map[string]windows.Handle)
|
||||
|
||||
taskbarEventNamePtr, _ := windows.UTF16PtrFromString("TaskbarCreated")
|
||||
// https://msdn.microsoft.com/en-us/library/windows/desktop/ms644947
|
||||
res, _, err := pRegisterWindowMessage.Call(
|
||||
uintptr(unsafe.Pointer(taskbarEventNamePtr)),
|
||||
)
|
||||
if res == 0 { // success 0xc000-0xfff
|
||||
return fmt.Errorf("failed to register window: %w", err)
|
||||
}
|
||||
t.wmTaskbarCreated = uint32(res)
|
||||
|
||||
instanceHandle, _, err := pGetModuleHandle.Call(0)
|
||||
if instanceHandle == 0 {
|
||||
return err
|
||||
}
|
||||
t.instance = windows.Handle(instanceHandle)
|
||||
|
||||
// https://msdn.microsoft.com/en-us/library/windows/desktop/ms648072(v=vs.85).aspx
|
||||
iconHandle, _, err := pLoadIcon.Call(0, uintptr(IDI_APPLICATION))
|
||||
if iconHandle == 0 {
|
||||
return err
|
||||
}
|
||||
t.icon = windows.Handle(iconHandle)
|
||||
|
||||
// https://msdn.microsoft.com/en-us/library/windows/desktop/ms648391(v=vs.85).aspx
|
||||
cursorHandle, _, err := pLoadCursor.Call(0, uintptr(IDC_ARROW))
|
||||
if cursorHandle == 0 {
|
||||
return err
|
||||
}
|
||||
t.cursor = windows.Handle(cursorHandle)
|
||||
|
||||
classNamePtr, err := windows.UTF16PtrFromString(className)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
windowNamePtr, err := windows.UTF16PtrFromString(windowName)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
t.wcex = &wndClassEx{
|
||||
Style: CS_HREDRAW | CS_VREDRAW,
|
||||
WndProc: windows.NewCallback(t.wndProc),
|
||||
Instance: t.instance,
|
||||
Icon: t.icon,
|
||||
Cursor: t.cursor,
|
||||
Background: windows.Handle(6), // (COLOR_WINDOW + 1)
|
||||
ClassName: classNamePtr,
|
||||
IconSm: t.icon,
|
||||
}
|
||||
if err := t.wcex.register(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
windowHandle, _, err := pCreateWindowEx.Call(
|
||||
uintptr(0),
|
||||
uintptr(unsafe.Pointer(classNamePtr)),
|
||||
uintptr(unsafe.Pointer(windowNamePtr)),
|
||||
uintptr(WS_OVERLAPPEDWINDOW),
|
||||
uintptr(CW_USEDEFAULT),
|
||||
uintptr(CW_USEDEFAULT),
|
||||
uintptr(CW_USEDEFAULT),
|
||||
uintptr(CW_USEDEFAULT),
|
||||
uintptr(0),
|
||||
uintptr(0),
|
||||
uintptr(t.instance),
|
||||
uintptr(0),
|
||||
)
|
||||
if windowHandle == 0 {
|
||||
return err
|
||||
}
|
||||
t.window = windows.Handle(windowHandle)
|
||||
|
||||
pShowWindow.Call(uintptr(t.window), uintptr(SW_HIDE)) //nolint:errcheck
|
||||
|
||||
boolRet, _, err := pUpdateWindow.Call(uintptr(t.window))
|
||||
if boolRet == 0 {
|
||||
slog.Error(fmt.Sprintf("failed to update window: %s", err))
|
||||
}
|
||||
|
||||
t.muNID.Lock()
|
||||
defer t.muNID.Unlock()
|
||||
t.nid = ¬ifyIconData{
|
||||
Wnd: windows.Handle(t.window),
|
||||
ID: 100,
|
||||
Flags: NIF_MESSAGE,
|
||||
CallbackMessage: t.wmSystrayMessage,
|
||||
}
|
||||
t.nid.Size = uint32(unsafe.Sizeof(*t.nid))
|
||||
|
||||
return t.nid.add()
|
||||
}
|
||||
|
||||
func (t *winTray) createMenu() error {
|
||||
|
||||
menuHandle, _, err := pCreatePopupMenu.Call()
|
||||
if menuHandle == 0 {
|
||||
return err
|
||||
}
|
||||
t.menus[0] = windows.Handle(menuHandle)
|
||||
|
||||
// https://msdn.microsoft.com/en-us/library/windows/desktop/ms647575(v=vs.85).aspx
|
||||
mi := struct {
|
||||
Size, Mask, Style, Max uint32
|
||||
Background windows.Handle
|
||||
ContextHelpID uint32
|
||||
MenuData uintptr
|
||||
}{
|
||||
Mask: MIM_APPLYTOSUBMENUS,
|
||||
}
|
||||
mi.Size = uint32(unsafe.Sizeof(mi))
|
||||
|
||||
res, _, err := pSetMenuInfo.Call(
|
||||
uintptr(t.menus[0]),
|
||||
uintptr(unsafe.Pointer(&mi)),
|
||||
)
|
||||
if res == 0 {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Contains information about a menu item.
|
||||
// https://msdn.microsoft.com/en-us/library/windows/desktop/ms647578(v=vs.85).aspx
|
||||
type menuItemInfo struct {
|
||||
Size, Mask, Type, State uint32
|
||||
ID uint32
|
||||
SubMenu, Checked, Unchecked windows.Handle
|
||||
ItemData uintptr
|
||||
TypeData *uint16
|
||||
Cch uint32
|
||||
BMPItem windows.Handle
|
||||
}
|
||||
|
||||
func (t *winTray) addOrUpdateMenuItem(menuItemId uint32, parentId uint32, title string, disabled bool) error {
|
||||
titlePtr, err := windows.UTF16PtrFromString(title)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
mi := menuItemInfo{
|
||||
Mask: MIIM_FTYPE | MIIM_STRING | MIIM_ID | MIIM_STATE,
|
||||
Type: MFT_STRING,
|
||||
ID: uint32(menuItemId),
|
||||
TypeData: titlePtr,
|
||||
Cch: uint32(len(title)),
|
||||
}
|
||||
mi.Size = uint32(unsafe.Sizeof(mi))
|
||||
if disabled {
|
||||
mi.State |= MFS_DISABLED
|
||||
}
|
||||
|
||||
var res uintptr
|
||||
t.muMenus.RLock()
|
||||
menu := t.menus[parentId]
|
||||
t.muMenus.RUnlock()
|
||||
if t.getVisibleItemIndex(parentId, menuItemId) != -1 {
|
||||
// We set the menu item info based on the menuID
|
||||
boolRet, _, err := pSetMenuItemInfo.Call(
|
||||
uintptr(menu),
|
||||
uintptr(menuItemId),
|
||||
0,
|
||||
uintptr(unsafe.Pointer(&mi)),
|
||||
)
|
||||
if boolRet == 0 {
|
||||
return fmt.Errorf("failed to set menu item: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
if res == 0 {
|
||||
// Menu item does not already exist, create it
|
||||
t.muMenus.RLock()
|
||||
submenu, exists := t.menus[menuItemId]
|
||||
t.muMenus.RUnlock()
|
||||
if exists {
|
||||
mi.Mask |= MIIM_SUBMENU
|
||||
mi.SubMenu = submenu
|
||||
}
|
||||
t.addToVisibleItems(parentId, menuItemId)
|
||||
position := t.getVisibleItemIndex(parentId, menuItemId)
|
||||
res, _, err = pInsertMenuItem.Call(
|
||||
uintptr(menu),
|
||||
uintptr(position),
|
||||
1,
|
||||
uintptr(unsafe.Pointer(&mi)),
|
||||
)
|
||||
if res == 0 {
|
||||
t.delFromVisibleItems(parentId, menuItemId)
|
||||
return err
|
||||
}
|
||||
t.muMenuOf.Lock()
|
||||
t.menuOf[menuItemId] = menu
|
||||
t.muMenuOf.Unlock()
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (t *winTray) addSeparatorMenuItem(menuItemId, parentId uint32) error {
|
||||
|
||||
mi := menuItemInfo{
|
||||
Mask: MIIM_FTYPE | MIIM_ID | MIIM_STATE,
|
||||
Type: MFT_SEPARATOR,
|
||||
ID: uint32(menuItemId),
|
||||
}
|
||||
|
||||
mi.Size = uint32(unsafe.Sizeof(mi))
|
||||
|
||||
t.addToVisibleItems(parentId, menuItemId)
|
||||
position := t.getVisibleItemIndex(parentId, menuItemId)
|
||||
t.muMenus.RLock()
|
||||
menu := uintptr(t.menus[parentId])
|
||||
t.muMenus.RUnlock()
|
||||
res, _, err := pInsertMenuItem.Call(
|
||||
menu,
|
||||
uintptr(position),
|
||||
1,
|
||||
uintptr(unsafe.Pointer(&mi)),
|
||||
)
|
||||
if res == 0 {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// func (t *winTray) hideMenuItem(menuItemId, parentId uint32) error {
|
||||
// const ERROR_SUCCESS syscall.Errno = 0
|
||||
|
||||
// t.muMenus.RLock()
|
||||
// menu := uintptr(t.menus[parentId])
|
||||
// t.muMenus.RUnlock()
|
||||
// res, _, err := pRemoveMenu.Call(
|
||||
// menu,
|
||||
// uintptr(menuItemId),
|
||||
// MF_BYCOMMAND,
|
||||
// )
|
||||
// if res == 0 && err.(syscall.Errno) != ERROR_SUCCESS {
|
||||
// return err
|
||||
// }
|
||||
// t.delFromVisibleItems(parentId, menuItemId)
|
||||
|
||||
// return nil
|
||||
// }
|
||||
|
||||
func (t *winTray) showMenu() error {
|
||||
p := point{}
|
||||
boolRet, _, err := pGetCursorPos.Call(uintptr(unsafe.Pointer(&p)))
|
||||
if boolRet == 0 {
|
||||
return err
|
||||
}
|
||||
boolRet, _, err = pSetForegroundWindow.Call(uintptr(t.window))
|
||||
if boolRet == 0 {
|
||||
slog.Warn(fmt.Sprintf("failed to bring menu to foreground: %s", err))
|
||||
}
|
||||
|
||||
boolRet, _, err = pTrackPopupMenu.Call(
|
||||
uintptr(t.menus[0]),
|
||||
TPM_BOTTOMALIGN|TPM_LEFTALIGN,
|
||||
uintptr(p.X),
|
||||
uintptr(p.Y),
|
||||
0,
|
||||
uintptr(t.window),
|
||||
0,
|
||||
)
|
||||
if boolRet == 0 {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||

func (t *winTray) delFromVisibleItems(parent, val uint32) {
	t.muVisibleItems.Lock()
	defer t.muVisibleItems.Unlock()
	visibleItems := t.visibleItems[parent]
	for i, itemval := range visibleItems {
		if val == itemval {
			t.visibleItems[parent] = append(visibleItems[:i], visibleItems[i+1:]...)
			break
		}
	}
}

func (t *winTray) addToVisibleItems(parent, val uint32) {
	t.muVisibleItems.Lock()
	defer t.muVisibleItems.Unlock()
	if visibleItems, exists := t.visibleItems[parent]; !exists {
		t.visibleItems[parent] = []uint32{val}
	} else {
		newvisible := append(visibleItems, val)
		sort.Slice(newvisible, func(i, j int) bool { return newvisible[i] < newvisible[j] })
		t.visibleItems[parent] = newvisible
	}
}

func (t *winTray) getVisibleItemIndex(parent, val uint32) int {
	t.muVisibleItems.RLock()
	defer t.muVisibleItems.RUnlock()
	for i, itemval := range t.visibleItems[parent] {
		if val == itemval {
			return i
		}
	}
	return -1
}
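The three helpers above keep one sorted slice of visible menu-item IDs per parent menu, so the insertion position handed to InsertMenuItemW is simply the item's rank among its siblings. A minimal, self-contained sketch of the same bookkeeping (the names here are illustrative, not part of the package):

package main

import (
	"fmt"
	"sort"
)

// visibleItems mirrors the winTray bookkeeping: one sorted ID slice per parent menu.
var visibleItems = map[uint32][]uint32{}

func addToVisible(parent, val uint32) {
	items := append(visibleItems[parent], val)
	sort.Slice(items, func(i, j int) bool { return items[i] < items[j] })
	visibleItems[parent] = items
}

func visibleIndex(parent, val uint32) int {
	for i, v := range visibleItems[parent] {
		if v == val {
			return i
		}
	}
	return -1
}

func main() {
	for _, id := range []uint32{30, 10, 20} {
		addToVisible(0, id)
	}
	fmt.Println(visibleIndex(0, 20)) // 1: menu position follows ID order, not insertion order
}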

func iconBytesToFilePath(iconBytes []byte) (string, error) {
	bh := md5.Sum(iconBytes)
	dataHash := hex.EncodeToString(bh[:])
	iconFilePath := filepath.Join(os.TempDir(), "ollama_temp_icon_"+dataHash)

	if _, err := os.Stat(iconFilePath); os.IsNotExist(err) {
		if err := os.WriteFile(iconFilePath, iconBytes, 0644); err != nil {
			return "", err
		}
	}
	return iconFilePath, nil
}

// Loads an image from file and shows it in tray.
// Shell_NotifyIcon: https://msdn.microsoft.com/en-us/library/windows/desktop/bb762159(v=vs.85).aspx
func (t *winTray) setIcon(src string) error {

	h, err := t.loadIconFrom(src)
	if err != nil {
		return err
	}

	t.muNID.Lock()
	defer t.muNID.Unlock()
	t.nid.Icon = h
	t.nid.Flags |= NIF_ICON
	t.nid.Size = uint32(unsafe.Sizeof(*t.nid))

	return t.nid.modify()
}

// Loads an image from file to be shown in tray or menu item.
// LoadImage: https://msdn.microsoft.com/en-us/library/windows/desktop/ms648045(v=vs.85).aspx
func (t *winTray) loadIconFrom(src string) (windows.Handle, error) {

	// Save and reuse handles of loaded images
	t.muLoadedImages.RLock()
	h, ok := t.loadedImages[src]
	t.muLoadedImages.RUnlock()
	if !ok {
		srcPtr, err := windows.UTF16PtrFromString(src)
		if err != nil {
			return 0, err
		}
		res, _, err := pLoadImage.Call(
			0,
			uintptr(unsafe.Pointer(srcPtr)),
			IMAGE_ICON,
			0,
			0,
			LR_LOADFROMFILE|LR_DEFAULTSIZE,
		)
		if res == 0 {
			return 0, err
		}
		h = windows.Handle(res)
		t.muLoadedImages.Lock()
		t.loadedImages[src] = h
		t.muLoadedImages.Unlock()
	}
	return h, nil
}

func (t *winTray) DisplayFirstUseNotification() error {
	t.muNID.Lock()
	defer t.muNID.Unlock()
	copy(t.nid.InfoTitle[:], windows.StringToUTF16(firstTimeTitle))
	copy(t.nid.Info[:], windows.StringToUTF16(firstTimeMessage))
	t.nid.Flags |= NIF_INFO
	t.nid.Size = uint32(unsafe.Sizeof(*t.nid))

	return t.nid.modify()
}
@@ -1,89 +0,0 @@
//go:build windows

package wintray

import (
	"runtime"

	"golang.org/x/sys/windows"
)

var (
	k32 = windows.NewLazySystemDLL("Kernel32.dll")
	u32 = windows.NewLazySystemDLL("User32.dll")
	s32 = windows.NewLazySystemDLL("Shell32.dll")

	pCreatePopupMenu       = u32.NewProc("CreatePopupMenu")
	pCreateWindowEx        = u32.NewProc("CreateWindowExW")
	pDefWindowProc         = u32.NewProc("DefWindowProcW")
	pDestroyWindow         = u32.NewProc("DestroyWindow")
	pDispatchMessage       = u32.NewProc("DispatchMessageW")
	pGetCursorPos          = u32.NewProc("GetCursorPos")
	pGetMessage            = u32.NewProc("GetMessageW")
	pGetModuleHandle       = k32.NewProc("GetModuleHandleW")
	pInsertMenuItem        = u32.NewProc("InsertMenuItemW")
	pLoadCursor            = u32.NewProc("LoadCursorW")
	pLoadIcon              = u32.NewProc("LoadIconW")
	pLoadImage             = u32.NewProc("LoadImageW")
	pPostMessage           = u32.NewProc("PostMessageW")
	pPostQuitMessage       = u32.NewProc("PostQuitMessage")
	pRegisterClass         = u32.NewProc("RegisterClassExW")
	pRegisterWindowMessage = u32.NewProc("RegisterWindowMessageW")
	pSetForegroundWindow   = u32.NewProc("SetForegroundWindow")
	pSetMenuInfo           = u32.NewProc("SetMenuInfo")
	pSetMenuItemInfo       = u32.NewProc("SetMenuItemInfoW")
	pShellNotifyIcon       = s32.NewProc("Shell_NotifyIconW")
	pShowWindow            = u32.NewProc("ShowWindow")
	pTrackPopupMenu        = u32.NewProc("TrackPopupMenu")
	pTranslateMessage      = u32.NewProc("TranslateMessage")
	pUnregisterClass       = u32.NewProc("UnregisterClassW")
	pUpdateWindow          = u32.NewProc("UpdateWindow")
)

const (
	CS_HREDRAW          = 0x0002
	CS_VREDRAW          = 0x0001
	CW_USEDEFAULT       = 0x80000000
	IDC_ARROW           = 32512 // Standard arrow
	IDI_APPLICATION     = 32512
	IMAGE_ICON          = 1          // Loads an icon
	LR_DEFAULTSIZE      = 0x00000040 // Loads default-size icon for windows(SM_CXICON x SM_CYICON) if cx, cy are set to zero
	LR_LOADFROMFILE     = 0x00000010 // Loads the stand-alone image from the file
	MF_BYCOMMAND        = 0x00000000
	MFS_DISABLED        = 0x00000003
	MFT_SEPARATOR       = 0x00000800
	MFT_STRING          = 0x00000000
	MIIM_BITMAP         = 0x00000080
	MIIM_FTYPE          = 0x00000100
	MIIM_ID             = 0x00000002
	MIIM_STATE          = 0x00000001
	MIIM_STRING         = 0x00000040
	MIIM_SUBMENU        = 0x00000004
	MIM_APPLYTOSUBMENUS = 0x80000000
	NIF_ICON            = 0x00000002
	NIF_INFO            = 0x00000010
	NIF_MESSAGE         = 0x00000001
	SW_HIDE             = 0
	TPM_BOTTOMALIGN     = 0x0020
	TPM_LEFTALIGN       = 0x0000
	WM_CLOSE            = 0x0010
	WM_USER             = 0x0400
	WS_CAPTION          = 0x00C00000
	WS_MAXIMIZEBOX      = 0x00010000
	WS_MINIMIZEBOX      = 0x00020000
	WS_OVERLAPPED       = 0x00000000
	WS_OVERLAPPEDWINDOW = WS_OVERLAPPED | WS_CAPTION | WS_SYSMENU | WS_THICKFRAME | WS_MINIMIZEBOX | WS_MAXIMIZEBOX
	WS_SYSMENU          = 0x00080000
	WS_THICKFRAME       = 0x00040000
)

// Not sure if this is actually needed on windows
func init() {
	runtime.LockOSThread()
}

// The POINT structure defines the x- and y- coordinates of a point.
// https://msdn.microsoft.com/en-us/library/windows/desktop/dd162805(v=vs.85).aspx
type point struct {
	X, Y int32
}
@@ -1,45 +0,0 @@
//go:build windows

package wintray

import (
	"unsafe"

	"golang.org/x/sys/windows"
)

// Contains window class information.
// It is used with the RegisterClassEx and GetClassInfoEx functions.
// https://msdn.microsoft.com/en-us/library/ms633577.aspx
type wndClassEx struct {
	Size, Style                        uint32
	WndProc                            uintptr
	ClsExtra, WndExtra                 int32
	Instance, Icon, Cursor, Background windows.Handle
	MenuName, ClassName                *uint16
	IconSm                             windows.Handle
}

// Registers a window class for subsequent use in calls to the CreateWindow or CreateWindowEx function.
// https://msdn.microsoft.com/en-us/library/ms633587.aspx
func (w *wndClassEx) register() error {
	w.Size = uint32(unsafe.Sizeof(*w))
	res, _, err := pRegisterClass.Call(uintptr(unsafe.Pointer(w)))
	if res == 0 {
		return err
	}
	return nil
}

// Unregisters a window class, freeing the memory required for the class.
// https://msdn.microsoft.com/en-us/library/ms644899.aspx
func (w *wndClassEx) unregister() error {
	res, _, err := pUnregisterClass.Call(
		uintptr(unsafe.Pointer(w.ClassName)),
		uintptr(w.Instance),
	)
	if res == 0 {
		return err
	}
	return nil
}
91
auth/auth.go
@@ -1,91 +0,0 @@
package auth

import (
	"bytes"
	"context"
	"crypto/rand"
	"encoding/base64"
	"fmt"
	"io"
	"log/slog"
	"os"
	"path/filepath"
	"strings"

	"golang.org/x/crypto/ssh"
)

const defaultPrivateKey = "id_ed25519"

func keyPath() (string, error) {
	home, err := os.UserHomeDir()
	if err != nil {
		return "", err
	}

	return filepath.Join(home, ".ollama", defaultPrivateKey), nil
}

func GetPublicKey() (string, error) {
	keyPath, err := keyPath()
	if err != nil {
		return "", err
	}

	privateKeyFile, err := os.ReadFile(keyPath)
	if err != nil {
		slog.Info(fmt.Sprintf("Failed to load private key: %v", err))
		return "", err
	}

	privateKey, err := ssh.ParsePrivateKey(privateKeyFile)
	if err != nil {
		return "", err
	}

	publicKey := ssh.MarshalAuthorizedKey(privateKey.PublicKey())

	return strings.TrimSpace(string(publicKey)), nil
}

func NewNonce(r io.Reader, length int) (string, error) {
	nonce := make([]byte, length)
	if _, err := io.ReadFull(r, nonce); err != nil {
		return "", err
	}

	return base64.RawURLEncoding.EncodeToString(nonce), nil
}

func Sign(ctx context.Context, bts []byte) (string, error) {
	keyPath, err := keyPath()
	if err != nil {
		return "", err
	}

	privateKeyFile, err := os.ReadFile(keyPath)
	if err != nil {
		slog.Info(fmt.Sprintf("Failed to load private key: %v", err))
		return "", err
	}

	privateKey, err := ssh.ParsePrivateKey(privateKeyFile)
	if err != nil {
		return "", err
	}

	// get the pubkey, but remove the type
	publicKey := ssh.MarshalAuthorizedKey(privateKey.PublicKey())
	parts := bytes.Split(publicKey, []byte(" "))
	if len(parts) < 2 {
		return "", fmt.Errorf("malformed public key")
	}

	signedData, err := privateKey.Sign(rand.Reader, bts)
	if err != nil {
		return "", err
	}

	// signature is <pubkey>:<signature>
	return fmt.Sprintf("%s:%s", bytes.TrimSpace(parts[1]), base64.StdEncoding.EncodeToString(signedData.Blob)), nil
}
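For context, the `<pubkey>:<signature>` string produced by Sign can be checked on the receiving side roughly as follows. This is a hedged sketch, not code from this repo: it assumes the verifier learns the key type (e.g. "ssh-ed25519") out of band, since Sign strips it from the token.

package authverify

import (
	"encoding/base64"
	"fmt"
	"strings"

	"golang.org/x/crypto/ssh"
)

// verifyToken is a hypothetical counterpart to Sign: token is "<pubkey>:<signature>".
func verifyToken(token, keyType string, payload []byte) error {
	b64Key, b64Sig, ok := strings.Cut(token, ":")
	if !ok {
		return fmt.Errorf("malformed token")
	}
	// Rebuild an authorized_keys line so the ssh package can parse the key.
	pub, _, _, _, err := ssh.ParseAuthorizedKey([]byte(keyType + " " + b64Key))
	if err != nil {
		return err
	}
	blob, err := base64.StdEncoding.DecodeString(b64Sig)
	if err != nil {
		return err
	}
	// Sign encodes only the signature blob, so the format is recovered from the key type.
	return pub.Verify(payload, &ssh.Signature{Format: pub.Type(), Blob: blob})
}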
615
cmd/cmd.go
@@ -1,7 +1,6 @@
package cmd

import (
	"archive/zip"
	"bytes"
	"context"
	"crypto/ed25519"
@@ -12,36 +11,28 @@ import (
	"fmt"
	"io"
	"log"
	"math"
	"net"
	"net/http"
	"os"
	"os/exec"
	"os/signal"
	"path/filepath"
	"regexp"
	"runtime"
	"strings"
	"syscall"
	"time"

	"github.com/containerd/console"
	"github.com/mattn/go-runewidth"
	"github.com/olekukonko/tablewriter"
	"github.com/spf13/cobra"
	"golang.org/x/crypto/ssh"
	"golang.org/x/exp/slices"
	"golang.org/x/term"

	"github.com/ollama/ollama/api"
	"github.com/ollama/ollama/auth"
	"github.com/ollama/ollama/envconfig"
	"github.com/ollama/ollama/format"
	"github.com/ollama/ollama/parser"
	"github.com/ollama/ollama/progress"
	"github.com/ollama/ollama/server"
	"github.com/ollama/ollama/types/errtypes"
	"github.com/ollama/ollama/types/model"
	"github.com/ollama/ollama/version"
	"github.com/jmorganca/ollama/api"
	"github.com/jmorganca/ollama/format"
	"github.com/jmorganca/ollama/parser"
	"github.com/jmorganca/ollama/progress"
	"github.com/jmorganca/ollama/server"
	"github.com/jmorganca/ollama/version"
)

func CreateHandler(cmd *cobra.Command, args []string) error {
@@ -59,13 +50,14 @@ func CreateHandler(cmd *cobra.Command, args []string) error {
	p := progress.NewProgress(os.Stderr)
	defer p.Stop()

	f, err := os.Open(filename)
	bars := make(map[string]*progress.Bar)

	modelfile, err := os.ReadFile(filename)
	if err != nil {
		return err
	}
	defer f.Close()

	modelfile, err := parser.ParseFile(f)
	commands, err := parser.Parse(bytes.NewReader(modelfile))
	if err != nil {
		return err
	}
@@ -79,10 +71,10 @@ func CreateHandler(cmd *cobra.Command, args []string) error {
	spinner := progress.NewSpinner(status)
	p.Add(status, spinner)

	for i := range modelfile.Commands {
		switch modelfile.Commands[i].Name {
	for _, c := range commands {
		switch c.Name {
		case "model", "adapter":
			path := modelfile.Commands[i].Args
			path := c.Args
			if path == "~" {
				path = home
			} else if strings.HasPrefix(path, "~/") {
@@ -93,35 +85,29 @@ func CreateHandler(cmd *cobra.Command, args []string) error {
				path = filepath.Join(filepath.Dir(filename), path)
			}

			fi, err := os.Stat(path)
			if errors.Is(err, os.ErrNotExist) && modelfile.Commands[i].Name == "model" {
			bin, err := os.Open(path)
			if errors.Is(err, os.ErrNotExist) && c.Name == "model" {
				continue
			} else if err != nil {
				return err
			}
			defer bin.Close()

			if fi.IsDir() {
				// this is likely a safetensors or pytorch directory
				// TODO make this work w/ adapters
				tempfile, err := tempZipFiles(path)
				if err != nil {
					return err
				}
				defer os.RemoveAll(tempfile)

				path = tempfile
			hash := sha256.New()
			if _, err := io.Copy(hash, bin); err != nil {
				return err
			}
			bin.Seek(0, io.SeekStart)

			digest, err := createBlob(cmd, client, path)
			if err != nil {
			digest := fmt.Sprintf("sha256:%x", hash.Sum(nil))
			if err = client.CreateBlob(cmd.Context(), digest, bin); err != nil {
				return err
			}

			modelfile.Commands[i].Args = "@" + digest
			modelfile = bytes.ReplaceAll(modelfile, []byte(c.Args), []byte("@"+digest))
		}
	}

	bars := make(map[string]*progress.Bar)
	fn := func(resp api.ProgressResponse) error {
		if resp.Digest != "" {
			spinner.Stop()
@@ -145,9 +131,7 @@ func CreateHandler(cmd *cobra.Command, args []string) error {
		return nil
	}

	quantize, _ := cmd.Flags().GetString("quantize")

	request := api.CreateRequest{Name: args[0], Modelfile: modelfile.String(), Quantize: quantize}
	request := api.CreateRequest{Name: args[0], Modelfile: string(modelfile)}
	if err := client.Create(cmd.Context(), &request, fn); err != nil {
		return err
	}
@@ -155,137 +139,6 @@ func CreateHandler(cmd *cobra.Command, args []string) error {
	return nil
}

func tempZipFiles(path string) (string, error) {
	tempfile, err := os.CreateTemp("", "ollama-tf")
	if err != nil {
		return "", err
	}
	defer tempfile.Close()

	zipfile := zip.NewWriter(tempfile)
	defer zipfile.Close()

	detectContentType := func(path string) (string, error) {
		f, err := os.Open(path)
		if err != nil {
			return "", err
		}
		defer f.Close()

		var b bytes.Buffer
		b.Grow(512)

		if _, err := io.CopyN(&b, f, 512); err != nil && !errors.Is(err, io.EOF) {
			return "", err
		}

		contentType, _, _ := strings.Cut(http.DetectContentType(b.Bytes()), ";")
		return contentType, nil
	}

	glob := func(pattern, contentType string) ([]string, error) {
		matches, err := filepath.Glob(pattern)
		if err != nil {
			return nil, err
		}

		for _, safetensor := range matches {
			if ct, err := detectContentType(safetensor); err != nil {
				return nil, err
			} else if ct != contentType {
				return nil, fmt.Errorf("invalid content type: expected %s for %s", ct, safetensor)
			}
		}

		return matches, nil
	}

	var files []string
	if st, _ := glob(filepath.Join(path, "model*.safetensors"), "application/octet-stream"); len(st) > 0 {
		// safetensors files might be unresolved git lfs references; skip if they are
		// covers model-x-of-y.safetensors, model.fp32-x-of-y.safetensors, model.safetensors
		files = append(files, st...)
	} else if pt, _ := glob(filepath.Join(path, "pytorch_model*.bin"), "application/zip"); len(pt) > 0 {
		// pytorch files might also be unresolved git lfs references; skip if they are
		// covers pytorch_model-x-of-y.bin, pytorch_model.fp32-x-of-y.bin, pytorch_model.bin
		files = append(files, pt...)
	} else if pt, _ := glob(filepath.Join(path, "consolidated*.pth"), "application/zip"); len(pt) > 0 {
		// pytorch files might also be unresolved git lfs references; skip if they are
		// covers consolidated.x.pth, consolidated.pth
		files = append(files, pt...)
	} else {
		return "", errors.New("no safetensors or torch files found")
	}

	// add configuration files, json files are detected as text/plain
	js, err := glob(filepath.Join(path, "*.json"), "text/plain")
	if err != nil {
		return "", err
	}
	files = append(files, js...)

	if tks, _ := glob(filepath.Join(path, "tokenizer.model"), "application/octet-stream"); len(tks) > 0 {
		// add tokenizer.model if it exists, tokenizer.json is automatically picked up by the previous glob
		// tokenizer.model might be a unresolved git lfs reference; error if it is
		files = append(files, tks...)
	} else if tks, _ := glob(filepath.Join(path, "**/tokenizer.model"), "text/plain"); len(tks) > 0 {
		// some times tokenizer.model is in a subdirectory (e.g. meta-llama/Meta-Llama-3-8B)
		files = append(files, tks...)
	}

	for _, file := range files {
		f, err := os.Open(file)
		if err != nil {
			return "", err
		}
		defer f.Close()

		fi, err := f.Stat()
		if err != nil {
			return "", err
		}

		zfi, err := zip.FileInfoHeader(fi)
		if err != nil {
			return "", err
		}

		zf, err := zipfile.CreateHeader(zfi)
		if err != nil {
			return "", err
		}

		if _, err := io.Copy(zf, f); err != nil {
			return "", err
		}
	}

	return tempfile.Name(), nil
}
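The content-type probe in tempZipFiles leans on the fact that http.DetectContentType never looks past the first 512 bytes. A standalone sketch of the same check (the file name is a hypothetical input, not one used by this command):

package main

import (
	"fmt"
	"net/http"
	"os"
	"strings"
)

func main() {
	f, err := os.Open("model.safetensors") // hypothetical input
	if err != nil {
		panic(err)
	}
	defer f.Close()

	// DetectContentType inspects at most 512 bytes, so that is all we read.
	buf := make([]byte, 512)
	n, _ := f.Read(buf)

	ct, _, _ := strings.Cut(http.DetectContentType(buf[:n]), ";")
	// Unresolved git-lfs pointer files are small ASCII text and sniff as
	// "text/plain", while real weights sniff as "application/octet-stream".
	fmt.Println(ct)
}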

func createBlob(cmd *cobra.Command, client *api.Client, path string) (string, error) {
	bin, err := os.Open(path)
	if err != nil {
		return "", err
	}
	defer bin.Close()

	hash := sha256.New()
	if _, err := io.Copy(hash, bin); err != nil {
		return "", err
	}

	if _, err := bin.Seek(0, io.SeekStart); err != nil {
		return "", err
	}

	digest := fmt.Sprintf("sha256:%x", hash.Sum(nil))
	if err = client.CreateBlob(cmd.Context(), digest, bin); err != nil {
		return "", err
	}
	return digest, nil
}
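The digest createBlob sends to the server is simply "sha256:" plus the lowercase hex of the SHA-256 sum. A quick standalone check:

package main

import (
	"crypto/sha256"
	"fmt"
)

func main() {
	data := []byte("hello") // stand-in for the model file contents
	fmt.Printf("sha256:%x\n", sha256.Sum256(data))
	// sha256:2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824
}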

func RunHandler(cmd *cobra.Command, args []string) error {
	client, err := api.ClientFromEnvironment()
	if err != nil {
@@ -293,121 +146,19 @@ func RunHandler(cmd *cobra.Command, args []string) error {
	}

	name := args[0]

	// check if the model exists on the server
	show, err := client.Show(cmd.Context(), &api.ShowRequest{Name: name})
	_, err = client.Show(cmd.Context(), &api.ShowRequest{Name: name})
	var statusError api.StatusError
	switch {
	case errors.As(err, &statusError) && statusError.StatusCode == http.StatusNotFound:
		if err := PullHandler(cmd, []string{name}); err != nil {
			return err
		}

		show, err = client.Show(cmd.Context(), &api.ShowRequest{Name: name})
		if err != nil {
			return err
		}
	case err != nil:
		return err
	}

	interactive := true

	opts := runOptions{
		Model:       args[0],
		WordWrap:    os.Getenv("TERM") == "xterm-256color",
		Options:     map[string]interface{}{},
		MultiModal:  slices.Contains(show.Details.Families, "clip"),
		ParentModel: show.Details.ParentModel,
	}

	format, err := cmd.Flags().GetString("format")
	if err != nil {
		return err
	}
	opts.Format = format

	keepAlive, err := cmd.Flags().GetString("keepalive")
	if err != nil {
		return err
	}
	if keepAlive != "" {
		d, err := time.ParseDuration(keepAlive)
		if err != nil {
			return err
		}
		opts.KeepAlive = &api.Duration{Duration: d}
	}

	prompts := args[1:]
	// prepend stdin to the prompt if provided
	if !term.IsTerminal(int(os.Stdin.Fd())) {
		in, err := io.ReadAll(os.Stdin)
		if err != nil {
			return err
		}

		prompts = append([]string{string(in)}, prompts...)
		opts.WordWrap = false
		interactive = false
	}
	opts.Prompt = strings.Join(prompts, " ")
	if len(prompts) > 0 {
		interactive = false
	}

	nowrap, err := cmd.Flags().GetBool("nowordwrap")
	if err != nil {
		return err
	}
	opts.WordWrap = !nowrap

	if !interactive {
		return generate(cmd, opts)
	}

	return generateInteractive(cmd, opts)
}

func errFromUnknownKey(unknownKeyErr error) error {
	// find SSH public key in the error message
	sshKeyPattern := `ssh-\w+ [^\s"]+`
	re := regexp.MustCompile(sshKeyPattern)
	matches := re.FindStringSubmatch(unknownKeyErr.Error())

	if len(matches) > 0 {
		serverPubKey := matches[0]

		localPubKey, err := auth.GetPublicKey()
		if err != nil {
			return unknownKeyErr
		}

		if runtime.GOOS == "linux" && serverPubKey != localPubKey {
			// try the ollama service public key
			svcPubKey, err := os.ReadFile("/usr/share/ollama/.ollama/id_ed25519.pub")
			if err != nil {
				return unknownKeyErr
			}
			localPubKey = strings.TrimSpace(string(svcPubKey))
		}

		// check if the returned public key matches the local public key, this prevents adding a remote key to the user's account
		if serverPubKey != localPubKey {
			return unknownKeyErr
		}

		var msg strings.Builder
		msg.WriteString(unknownKeyErr.Error())
		msg.WriteString("\n\nYour ollama key is:\n")
		msg.WriteString(localPubKey)
		msg.WriteString("\nAdd your key at:\n")
		msg.WriteString("https://ollama.com/settings/keys")

		return errors.New(msg.String())
	}

	return unknownKeyErr
	return RunGenerate(cmd, args)
}

func PushHandler(cmd *cobra.Command, args []string) error {
@@ -457,20 +208,6 @@ func PushHandler(cmd *cobra.Command, args []string) error {

	request := api.PushRequest{Name: args[0], Insecure: insecure}
	if err := client.Push(cmd.Context(), &request, fn); err != nil {
		if spinner != nil {
			spinner.Stop()
		}
		if strings.Contains(err.Error(), "access denied") {
			return errors.New("you are not authorized to push to this namespace, create the model under a namespace you own")
		}
		host := model.ParseName(args[0]).Host
		isOllamaHost := strings.HasSuffix(host, ".ollama.ai") || strings.HasSuffix(host, ".ollama.com")
		if strings.Contains(err.Error(), errtypes.UnknownOllamaKeyErrMsg) && isOllamaHost {
			// the user has not added their ollama key to ollama.com
			// re-throw an error with a more user-friendly message
			return errFromUnknownKey(err)
		}

		return err
	}

@@ -511,52 +248,6 @@ func ListHandler(cmd *cobra.Command, args []string) error {
	return nil
}

func ListRunningHandler(cmd *cobra.Command, args []string) error {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return err
	}

	models, err := client.ListRunning(cmd.Context())
	if err != nil {
		return err
	}

	var data [][]string

	for _, m := range models.Models {
		if len(args) == 0 || strings.HasPrefix(m.Name, args[0]) {
			var procStr string
			switch {
			case m.SizeVRAM == 0:
				procStr = "100% CPU"
			case m.SizeVRAM == m.Size:
				procStr = "100% GPU"
			case m.SizeVRAM > m.Size || m.Size == 0:
				procStr = "Unknown"
			default:
				sizeCPU := m.Size - m.SizeVRAM
				cpuPercent := math.Round(float64(sizeCPU) / float64(m.Size) * 100)
				procStr = fmt.Sprintf("%d%%/%d%% CPU/GPU", int(cpuPercent), int(100-cpuPercent))
			}
			data = append(data, []string{m.Name, m.Digest[:12], format.HumanBytes(m.Size), procStr, format.HumanTime(m.ExpiresAt, "Never")})
		}
	}

	table := tablewriter.NewWriter(os.Stdout)
	table.SetHeader([]string{"NAME", "ID", "SIZE", "PROCESSOR", "UNTIL"})
	table.SetHeaderAlignment(tablewriter.ALIGN_LEFT)
	table.SetAlignment(tablewriter.ALIGN_LEFT)
	table.SetHeaderLine(false)
	table.SetBorder(false)
	table.SetNoWhiteSpace(true)
	table.SetTablePadding("\t")
	table.AppendBulk(data)
	table.Render()

	return nil
}

func DeleteHandler(cmd *cobra.Command, args []string) error {
	client, err := api.ClientFromEnvironment()
	if err != nil {
@@ -719,21 +410,63 @@ func PullHandler(cmd *cobra.Command, args []string) error {
	return nil
}

func RunGenerate(cmd *cobra.Command, args []string) error {
	interactive := true

	opts := runOptions{
		Model:    args[0],
		WordWrap: os.Getenv("TERM") == "xterm-256color",
		Options:  map[string]interface{}{},
	}

	format, err := cmd.Flags().GetString("format")
	if err != nil {
		return err
	}
	opts.Format = format

	prompts := args[1:]
	// prepend stdin to the prompt if provided
	if !term.IsTerminal(int(os.Stdin.Fd())) {
		in, err := io.ReadAll(os.Stdin)
		if err != nil {
			return err
		}

		prompts = append([]string{string(in)}, prompts...)
		opts.WordWrap = false
		interactive = false
	}
	opts.Prompt = strings.Join(prompts, " ")
	if len(prompts) > 0 {
		interactive = false
	}

	nowrap, err := cmd.Flags().GetBool("nowordwrap")
	if err != nil {
		return err
	}
	opts.WordWrap = !nowrap

	if !interactive {
		return generate(cmd, opts)
	}

	return generateInteractive(cmd, opts)
}

type generateContextKey string

type runOptions struct {
	Model       string
	ParentModel string
	Prompt      string
	Messages    []api.Message
	WordWrap    bool
	Format      string
	System      string
	Template    string
	Images      []api.ImageData
	Options     map[string]interface{}
	MultiModal  bool
	KeepAlive   *api.Duration
	Model    string
	Prompt   string
	Messages []api.Message
	WordWrap bool
	Format   string
	System   string
	Template string
	Images   []api.ImageData
	Options  map[string]interface{}
}

type displayResponseState struct {
@@ -746,8 +479,7 @@ func displayResponse(content string, wordWrap bool, state *displayResponseState)
	if wordWrap && termWidth >= 10 {
		for _, ch := range content {
			if state.lineLength+1 > termWidth-5 {

				if runewidth.StringWidth(state.wordBuffer) > termWidth-10 {
				if len(state.wordBuffer) > termWidth-10 {
					fmt.Printf("%s%c", state.wordBuffer, ch)
					state.wordBuffer = ""
					state.lineLength = 0
@@ -755,18 +487,12 @@ func displayResponse(content string, wordWrap bool, state *displayResponseState)
				}

				// backtrack the length of the last word and clear to the end of the line
				fmt.Printf("\x1b[%dD\x1b[K\n", runewidth.StringWidth(state.wordBuffer))
				fmt.Printf("\x1b[%dD\x1b[K\n", len(state.wordBuffer))
				fmt.Printf("%s%c", state.wordBuffer, ch)
				chWidth := runewidth.RuneWidth(ch)

				state.lineLength = runewidth.StringWidth(state.wordBuffer) + chWidth
				state.lineLength = len(state.wordBuffer) + 1
			} else {
				fmt.Print(string(ch))
				state.lineLength += runewidth.RuneWidth(ch)
				if runewidth.RuneWidth(ch) >= 2 {
					state.wordBuffer = ""
					continue
				}
				state.lineLength += 1

				switch ch {
				case ' ':
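The hunk above swaps byte-length math (len) for display-width math from github.com/mattn/go-runewidth; the difference matters once a response contains characters wider than one terminal column. A quick illustration:

package main

import (
	"fmt"

	"github.com/mattn/go-runewidth"
)

func main() {
	s := "日本語" // three runes, each two terminal columns wide
	fmt.Println(len(s))                   // 9 (bytes)
	fmt.Println(runewidth.StringWidth(s)) // 6 (columns)
	fmt.Println(runewidth.RuneWidth('語')) // 2
}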
@@ -835,10 +561,6 @@ func chat(cmd *cobra.Command, opts runOptions) (*api.Message, error) {
		Options: opts.Options,
	}

	if opts.KeepAlive != nil {
		req.KeepAlive = opts.KeepAlive
	}

	if err := client.Chat(cancelCtx, req, fn); err != nil {
		if errors.Is(err, context.Canceled) {
			return nil, nil
@@ -906,23 +628,14 @@ func generate(cmd *cobra.Command, opts runOptions) error {
		return nil
	}

	if opts.MultiModal {
		opts.Prompt, opts.Images, err = extractFileData(opts.Prompt)
		if err != nil {
			return err
		}
	}

	request := api.GenerateRequest{
		Model:     opts.Model,
		Prompt:    opts.Prompt,
		Context:   generateContext,
		Images:    opts.Images,
		Format:    opts.Format,
		System:    opts.System,
		Template:  opts.Template,
		Options:   opts.Options,
		KeepAlive: opts.KeepAlive,
		Model:    opts.Model,
		Prompt:   opts.Prompt,
		Context:  generateContext,
		Format:   opts.Format,
		System:   opts.System,
		Template: opts.Template,
		Options:  opts.Options,
	}

	if err := client.Generate(ctx, &request, fn); err != nil {
@@ -957,27 +670,24 @@ func generate(cmd *cobra.Command, opts runOptions) error {
}

func RunServer(cmd *cobra.Command, _ []string) error {
	// retrieve the OLLAMA_HOST environment variable
	ollamaHost, err := api.GetOllamaHost()
	host, port, err := net.SplitHostPort(os.Getenv("OLLAMA_HOST"))
	if err != nil {
		return err
		host, port = "127.0.0.1", "11434"
		if ip := net.ParseIP(strings.Trim(os.Getenv("OLLAMA_HOST"), "[]")); ip != nil {
			host = ip.String()
		}
	}

	if err := initializeKeypair(); err != nil {
		return err
	}

	ln, err := net.Listen("tcp", net.JoinHostPort(ollamaHost.Host, ollamaHost.Port))
	ln, err := net.Listen("tcp", net.JoinHostPort(host, port))
	if err != nil {
		return err
	}

	err = server.Serve(ln)
	if errors.Is(err, http.ErrServerClosed) {
		return nil
	}

	return err
	return server.Serve(ln)
}

func initializeKeypair() error {
@@ -992,42 +702,59 @@ func initializeKeypair() error {
	_, err = os.Stat(privKeyPath)
	if os.IsNotExist(err) {
		fmt.Printf("Couldn't find '%s'. Generating new private key.\n", privKeyPath)
		cryptoPublicKey, cryptoPrivateKey, err := ed25519.GenerateKey(rand.Reader)
		_, privKey, err := ed25519.GenerateKey(rand.Reader)
		if err != nil {
			return err
		}

		privateKeyBytes, err := ssh.MarshalPrivateKey(cryptoPrivateKey, "")
		privKeyBytes, err := format.OpenSSHPrivateKey(privKey, "")
		if err != nil {
			return err
		}

		if err := os.MkdirAll(filepath.Dir(privKeyPath), 0o755); err != nil {
		err = os.MkdirAll(filepath.Dir(privKeyPath), 0o755)
		if err != nil {
			return fmt.Errorf("could not create directory %w", err)
		}

		if err := os.WriteFile(privKeyPath, pem.EncodeToMemory(privateKeyBytes), 0o600); err != nil {
			return err
		}

		sshPublicKey, err := ssh.NewPublicKey(cryptoPublicKey)
		err = os.WriteFile(privKeyPath, pem.EncodeToMemory(privKeyBytes), 0o600)
		if err != nil {
			return err
		}

		publicKeyBytes := ssh.MarshalAuthorizedKey(sshPublicKey)

		if err := os.WriteFile(pubKeyPath, publicKeyBytes, 0o644); err != nil {
		sshPrivateKey, err := ssh.NewSignerFromKey(privKey)
		if err != nil {
			return err
		}

		fmt.Printf("Your new public key is: \n\n%s\n", publicKeyBytes)
		pubKeyData := ssh.MarshalAuthorizedKey(sshPrivateKey.PublicKey())

		err = os.WriteFile(pubKeyPath, pubKeyData, 0o644)
		if err != nil {
			return err
		}

		fmt.Printf("Your new public key is: \n\n%s\n", string(pubKeyData))
	}
	return nil
}
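Both sides of the initializeKeypair hunk produce an OpenSSH-format ed25519 keypair; the newer side uses ssh.MarshalPrivateKey from golang.org/x/crypto. A standalone sketch of that flow, with the file paths omitted:

package main

import (
	"crypto/ed25519"
	"crypto/rand"
	"encoding/pem"
	"fmt"

	"golang.org/x/crypto/ssh"
)

func main() {
	pub, priv, err := ed25519.GenerateKey(rand.Reader)
	if err != nil {
		panic(err)
	}

	// Private half: OpenSSH PEM, what would be written to id_ed25519.
	block, err := ssh.MarshalPrivateKey(priv, "")
	if err != nil {
		panic(err)
	}
	fmt.Print(string(pem.EncodeToMemory(block)))

	// Public half: a single authorized_keys line, what would be written to id_ed25519.pub.
	sshPub, err := ssh.NewPublicKey(pub)
	if err != nil {
		panic(err)
	}
	fmt.Print(string(ssh.MarshalAuthorizedKey(sshPub)))
}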

//nolint:unused
func waitForServer(ctx context.Context, client *api.Client) error {
func startMacApp(ctx context.Context, client *api.Client) error {
	exe, err := os.Executable()
	if err != nil {
		return err
	}
	link, err := os.Readlink(exe)
	if err != nil {
		return err
	}
	if !strings.Contains(link, "Ollama.app") {
		return fmt.Errorf("could not find ollama app")
	}
	path := strings.Split(link, "Ollama.app")
	if err := exec.Command("/usr/bin/open", "-a", path[0]+"Ollama.app").Run(); err != nil {
		return err
	}
	// wait for the server to start
	timeout := time.After(5 * time.Second)
	tick := time.Tick(500 * time.Millisecond)
@@ -1041,7 +768,6 @@ func waitForServer(ctx context.Context, client *api.Client) error {
		}
	}
}

}

func checkServerHeartbeat(cmd *cobra.Command, _ []string) error {
@@ -1050,11 +776,15 @@ func checkServerHeartbeat(cmd *cobra.Command, _ []string) error {
		return err
	}
	if err := client.Heartbeat(cmd.Context()); err != nil {
		if !strings.Contains(err.Error(), " refused") {
		if !strings.Contains(err.Error(), "connection refused") {
			return err
		}
		if err := startApp(cmd.Context(), client); err != nil {
			return fmt.Errorf("could not connect to ollama app, is it running?")
		if runtime.GOOS == "darwin" {
			if err := startMacApp(cmd.Context(), client); err != nil {
				return fmt.Errorf("could not connect to ollama app, is it running?")
			}
		} else {
			return fmt.Errorf("could not connect to ollama server, run 'ollama serve' to start it")
		}
	}
	return nil
@@ -1080,29 +810,10 @@ func versionHandler(cmd *cobra.Command, _ []string) {
	}
}

func appendEnvDocs(cmd *cobra.Command, envs []envconfig.EnvVar) {
	if len(envs) == 0 {
		return
	}

	envUsage := `
Environment Variables:
`
	for _, e := range envs {
		envUsage += fmt.Sprintf("  %-24s %s\n", e.Name, e.Description)
	}

	cmd.SetUsageTemplate(cmd.UsageTemplate() + envUsage)
}

func NewCLI() *cobra.Command {
	log.SetFlags(log.LstdFlags | log.Lshortfile)
	cobra.EnableCommandSorting = false

	if runtime.GOOS == "windows" {
		console.ConsoleFromFile(os.Stdin) //nolint:errcheck
	}

	rootCmd := &cobra.Command{
		Use:   "ollama",
		Short: "Large language model runner",
@@ -1131,8 +842,7 @@ func NewCLI() *cobra.Command {
		RunE: CreateHandler,
	}

	createCmd.Flags().StringP("file", "f", "Modelfile", "Name of the Modelfile")
	createCmd.Flags().StringP("quantize", "q", "", "Quantize model to this level (e.g. q4_0)")
	createCmd.Flags().StringP("file", "f", "Modelfile", "Name of the Modelfile (default \"Modelfile\")")

	showCmd := &cobra.Command{
		Use: "show MODEL",
@@ -1156,11 +866,11 @@ func NewCLI() *cobra.Command {
		RunE: RunHandler,
	}

	runCmd.Flags().String("keepalive", "", "Duration to keep a model loaded (e.g. 5m)")
	runCmd.Flags().Bool("verbose", false, "Show timings for response")
	runCmd.Flags().Bool("insecure", false, "Use an insecure registry")
	runCmd.Flags().Bool("nowordwrap", false, "Don't wrap words to the next line automatically")
	runCmd.Flags().String("format", "", "Response format (e.g. json)")

	serveCmd := &cobra.Command{
		Use:     "serve",
		Aliases: []string{"start"},
@@ -1197,15 +907,8 @@ func NewCLI() *cobra.Command {
		RunE: ListHandler,
	}

	psCmd := &cobra.Command{
		Use:     "ps",
		Short:   "List running models",
		PreRunE: checkServerHeartbeat,
		RunE:    ListRunningHandler,
	}

	copyCmd := &cobra.Command{
		Use:     "cp SOURCE DESTINATION",
		Use:     "cp SOURCE TARGET",
		Short:   "Copy a model",
		Args:    cobra.ExactArgs(2),
		PreRunE: checkServerHeartbeat,
@@ -1220,43 +923,6 @@ func NewCLI() *cobra.Command {
		RunE: DeleteHandler,
	}

	envVars := envconfig.AsMap()

	envs := []envconfig.EnvVar{envVars["OLLAMA_HOST"]}

	for _, cmd := range []*cobra.Command{
		createCmd,
		showCmd,
		runCmd,
		pullCmd,
		pushCmd,
		listCmd,
		psCmd,
		copyCmd,
		deleteCmd,
		serveCmd,
	} {
		switch cmd {
		case runCmd:
			appendEnvDocs(cmd, []envconfig.EnvVar{envVars["OLLAMA_HOST"], envVars["OLLAMA_NOHISTORY"]})
		case serveCmd:
			appendEnvDocs(cmd, []envconfig.EnvVar{
				envVars["OLLAMA_DEBUG"],
				envVars["OLLAMA_HOST"],
				envVars["OLLAMA_KEEP_ALIVE"],
				envVars["OLLAMA_MAX_LOADED_MODELS"],
				envVars["OLLAMA_MAX_QUEUE"],
				envVars["OLLAMA_MODELS"],
				envVars["OLLAMA_NUM_PARALLEL"],
				envVars["OLLAMA_NOPRUNE"],
				envVars["OLLAMA_ORIGINS"],
				envVars["OLLAMA_TMPDIR"],
			})
		default:
			appendEnvDocs(cmd, envs)
		}
	}

	rootCmd.AddCommand(
		serveCmd,
		createCmd,
@@ -1265,7 +931,6 @@ func NewCLI() *cobra.Command {
		pullCmd,
		pushCmd,
		listCmd,
		psCmd,
		copyCmd,
		deleteCmd,
	)
@@ -6,19 +6,14 @@ import (
	"io"
	"net/http"
	"os"
	"path/filepath"
	"regexp"
	"sort"
	"strings"

	"github.com/spf13/cobra"
	"golang.org/x/exp/slices"

	"github.com/ollama/ollama/api"
	"github.com/ollama/ollama/envconfig"
	"github.com/ollama/ollama/progress"
	"github.com/ollama/ollama/readline"
	"github.com/ollama/ollama/types/errtypes"
	"github.com/jmorganca/ollama/api"
	"github.com/jmorganca/ollama/readline"
)

type MultilineState int
@@ -30,88 +25,45 @@ const (
	MultilineTemplate
)

func loadModel(cmd *cobra.Command, opts *runOptions) error {
func modelIsMultiModal(cmd *cobra.Command, name string) bool {
	// get model details
	client, err := api.ClientFromEnvironment()
	if err != nil {
		return err
		fmt.Println("error: couldn't connect to ollama server")
		return false
	}

	p := progress.NewProgress(os.Stderr)
	defer p.StopAndClear()

	spinner := progress.NewSpinner("")
	p.Add("", spinner)

	showReq := api.ShowRequest{Name: opts.Model}
	showResp, err := client.Show(cmd.Context(), &showReq)
	req := api.ShowRequest{Name: name}
	resp, err := client.Show(cmd.Context(), &req)
	if err != nil {
		return err
	}
	opts.MultiModal = slices.Contains(showResp.Details.Families, "clip")
	opts.ParentModel = showResp.Details.ParentModel

	if len(showResp.Messages) > 0 {
		opts.Messages = append(opts.Messages, showResp.Messages...)
		return false
	}

	chatReq := &api.ChatRequest{
		Model:    opts.Model,
		Messages: []api.Message{},
	}

	if opts.KeepAlive != nil {
		chatReq.KeepAlive = opts.KeepAlive
	}

	err = client.Chat(cmd.Context(), chatReq, func(resp api.ChatResponse) error {
		p.StopAndClear()
		if len(opts.Messages) > 0 {
			for _, msg := range opts.Messages {
				switch msg.Role {
				case "user":
					fmt.Printf(">>> %s\n", msg.Content)
				case "assistant":
					state := &displayResponseState{}
					displayResponse(msg.Content, opts.WordWrap, state)
					fmt.Println()
					fmt.Println()
				}
			}
		}
		return nil
	})
	if err != nil {
		return err
	}

	return nil
	return slices.Contains(resp.Details.Families, "clip")
}

func generateInteractive(cmd *cobra.Command, opts runOptions) error {
	opts.Messages = make([]api.Message, 0)
	multiModal := modelIsMultiModal(cmd, opts.Model)

	err := loadModel(cmd, &opts)
	if err != nil {
	// load the model
	loadOpts := runOptions{
		Model:    opts.Model,
		Prompt:   "",
		Messages: []api.Message{},
	}
	if _, err := chat(cmd, loadOpts); err != nil {
		return err
	}

	usage := func() {
		fmt.Fprintln(os.Stderr, "Available Commands:")
		fmt.Fprintln(os.Stderr, "  /set            Set session variables")
		fmt.Fprintln(os.Stderr, "  /show           Show model information")
		fmt.Fprintln(os.Stderr, "  /load <model>   Load a session or model")
		fmt.Fprintln(os.Stderr, "  /save <model>   Save your current session")
		fmt.Fprintln(os.Stderr, "  /clear          Clear session context")
		fmt.Fprintln(os.Stderr, "  /bye            Exit")
		fmt.Fprintln(os.Stderr, "  /?, /help       Help for a command")
		fmt.Fprintln(os.Stderr, "  /? shortcuts    Help for keyboard shortcuts")
		fmt.Fprintln(os.Stderr, "  /set          Set session variables")
		fmt.Fprintln(os.Stderr, "  /show         Show model information")
		fmt.Fprintln(os.Stderr, "  /bye          Exit")
		fmt.Fprintln(os.Stderr, "  /?, /help     Help for a command")
		fmt.Fprintln(os.Stderr, "  /? shortcuts  Help for keyboard shortcuts")
		fmt.Fprintln(os.Stderr, "")
		fmt.Fprintln(os.Stderr, "Use \"\"\" to begin a multi-line message.")

		if opts.MultiModal {
			fmt.Fprintf(os.Stderr, "Use %s to include .jpg or .png images.\n", filepath.FromSlash("/path/to/file"))
		}

		fmt.Fprintln(os.Stderr, "")
	}

@@ -139,7 +91,6 @@ func generateInteractive(cmd *cobra.Command, opts runOptions) error {
	fmt.Fprintln(os.Stderr, "  Alt + f            Move forward (right) one word")
	fmt.Fprintln(os.Stderr, "  Ctrl + k           Delete the sentence after the cursor")
	fmt.Fprintln(os.Stderr, "  Ctrl + u           Delete the sentence before the cursor")
	fmt.Fprintln(os.Stderr, "  Ctrl + w           Delete the word before the cursor")
	fmt.Fprintln(os.Stderr, "")
	fmt.Fprintln(os.Stderr, "  Ctrl + l           Clear the screen")
	fmt.Fprintln(os.Stderr, "  Ctrl + c           Stop the model from responding")
@@ -170,7 +121,7 @@ func generateInteractive(cmd *cobra.Command, opts runOptions) error {
	fmt.Fprintln(os.Stderr, "  /set parameter repeat_penalty <float>     How strongly to penalize repetitions")
	fmt.Fprintln(os.Stderr, "  /set parameter repeat_last_n <int>        Set how far back to look for repetitions")
	fmt.Fprintln(os.Stderr, "  /set parameter num_gpu <int>              The number of layers to send to the GPU")
	fmt.Fprintln(os.Stderr, "  /set parameter stop <string> <string> ... Set the stop parameters")
	fmt.Fprintln(os.Stderr, "  /set parameter stop \"<string>\", ...       Set the stop parameters")
	fmt.Fprintln(os.Stderr, "")
}

@@ -184,15 +135,12 @@ func generateInteractive(cmd *cobra.Command, opts runOptions) error {
		return err
	}

	if envconfig.NoHistory {
		scanner.HistoryDisable()
	}

	fmt.Print(readline.StartBracketedPaste)
	defer fmt.Printf(readline.EndBracketedPaste)

	var sb strings.Builder
	var multiline MultilineState
	opts.Messages = make([]api.Message, 0)

	for {
		line, err := scanner.Readline()
@@ -226,7 +174,6 @@ func generateInteractive(cmd *cobra.Command, opts runOptions) error {
			switch multiline {
			case MultilineSystem:
				opts.System = sb.String()
				opts.Messages = append(opts.Messages, api.Message{Role: "system", Content: opts.System})
				fmt.Println("Set system message.")
				sb.Reset()
			case MultilineTemplate:
@@ -246,6 +193,7 @@ func generateInteractive(cmd *cobra.Command, opts runOptions) error {
				fmt.Fprintln(&sb)
				multiline = MultilinePrompt
				scanner.Prompt.UseAlt = true
				break
			}
		case scanner.Pasting:
			fmt.Fprintln(&sb, line)
@@ -255,55 +203,6 @@ func generateInteractive(cmd *cobra.Command, opts runOptions) error {
			if err := ListHandler(cmd, args[1:]); err != nil {
				return err
			}
		case strings.HasPrefix(line, "/load"):
			args := strings.Fields(line)
			if len(args) != 2 {
				fmt.Println("Usage:\n  /load <modelname>")
				continue
			}
			opts.Model = args[1]
			opts.Messages = []api.Message{}
			fmt.Printf("Loading model '%s'\n", opts.Model)
			if err := loadModel(cmd, &opts); err != nil {
				return err
			}
			continue
		case strings.HasPrefix(line, "/save"):
			args := strings.Fields(line)
			if len(args) != 2 {
				fmt.Println("Usage:\n  /save <modelname>")
				continue
			}

			client, err := api.ClientFromEnvironment()
			if err != nil {
				fmt.Println("error: couldn't connect to ollama server")
				return err
			}

			req := &api.CreateRequest{
				Name:      args[1],
				Modelfile: buildModelfile(opts),
			}
			fn := func(resp api.ProgressResponse) error { return nil }
			err = client.Create(cmd.Context(), req, fn)
			if err != nil {
				if strings.Contains(err.Error(), errtypes.InvalidModelNameErrMsg) {
					fmt.Printf("error: The model name '%s' is invalid\n", args[1])
					continue
				}
				return err
			}
			fmt.Printf("Created new model '%s'\n", args[1])
			continue
		case strings.HasPrefix(line, "/clear"):
			opts.Messages = []api.Message{}
			if opts.System != "" {
				newMessage := api.Message{Role: "system", Content: opts.System}
				opts.Messages = append(opts.Messages, newMessage)
			}
			fmt.Println("Cleared session context")
			continue
		case strings.HasPrefix(line, "/set"):
			args := strings.Fields(line)
			if len(args) > 1 {
@@ -319,14 +218,10 @@ func generateInteractive(cmd *cobra.Command, opts runOptions) error {
				opts.WordWrap = false
				fmt.Println("Set 'nowordwrap' mode.")
			case "verbose":
				if err := cmd.Flags().Set("verbose", "true"); err != nil {
					return err
				}
				cmd.Flags().Set("verbose", "true")
				fmt.Println("Set 'verbose' mode.")
			case "quiet":
				if err := cmd.Flags().Set("verbose", "false"); err != nil {
					return err
				}
				cmd.Flags().Set("verbose", "false")
				fmt.Println("Set 'quiet' mode.")
			case "format":
				if len(args) < 3 || args[2] != "json" {
@@ -382,21 +277,11 @@ func generateInteractive(cmd *cobra.Command, opts runOptions) error {
				}

				if args[1] == "system" {
					opts.System = sb.String() // for display in modelfile
					newMessage := api.Message{Role: "system", Content: sb.String()}
					// Check if the slice is not empty and the last message is from 'system'
					if len(opts.Messages) > 0 && opts.Messages[len(opts.Messages)-1].Role == "system" {
						// Replace the last message
						opts.Messages[len(opts.Messages)-1] = newMessage
					} else {
						opts.Messages = append(opts.Messages, newMessage)
					}
					opts.System = sb.String()
					fmt.Println("Set system message.")
					sb.Reset()
				} else if args[1] == "template" {
					opts.Template = sb.String()
					fmt.Println("Set prompt template.")
					sb.Reset()
				}

				sb.Reset()
@@ -498,13 +383,13 @@ func generateInteractive(cmd *cobra.Command, opts runOptions) error {
			} else {
				usage()
			}
		case strings.HasPrefix(line, "/exit"), strings.HasPrefix(line, "/bye"):
		case line == "/exit", line == "/bye":
			return nil
		case strings.HasPrefix(line, "/"):
			args := strings.Fields(line)
			isFile := false

			if opts.MultiModal {
			if multiModal {
				for _, f := range extractFileNames(line) {
					if strings.HasPrefix(f, args[0]) {
						isFile = true
@@ -526,23 +411,34 @@ func generateInteractive(cmd *cobra.Command, opts runOptions) error {
		if sb.Len() > 0 && multiline == MultilineNone {
			newMessage := api.Message{Role: "user", Content: sb.String()}

			if opts.MultiModal {
			if multiModal {
				msg, images, err := extractFileData(sb.String())
				if err != nil {
					return err
				}
				newMessage.Content = msg

				// clear all previous images for better responses
				// reset the context if we find another image
				if len(images) > 0 {
					for i := range opts.Messages {
						opts.Messages[i].Images = nil
					newMessage.Images = append(newMessage.Images, images...)
					// reset the context for the new image
					opts.Messages = []api.Message{}
				} else {
					if len(opts.Messages) > 1 {
						newMessage.Images = append(newMessage.Images, opts.Messages[len(opts.Messages)-2].Images...)
					}
				}

				newMessage.Content = msg
				newMessage.Images = images
				if len(newMessage.Images) == 0 {
					fmt.Println("This model requires you to add a jpeg, png, or svg image.")
					fmt.Println()
					sb.Reset()
					continue
				}
			}

			if opts.System != "" {
				opts.Messages = append(opts.Messages, api.Message{Role: "system", Content: opts.System})
			}
			opts.Messages = append(opts.Messages, newMessage)

			assistant, err := chat(cmd, opts)
@@ -558,38 +454,6 @@ func generateInteractive(cmd *cobra.Command, opts runOptions) error {
		}
	}

func buildModelfile(opts runOptions) string {
	var mf strings.Builder
	model := opts.ParentModel
	if model == "" {
		model = opts.Model
	}
	fmt.Fprintf(&mf, "FROM %s\n", model)
	if opts.System != "" {
		fmt.Fprintf(&mf, "SYSTEM \"\"\"%s\"\"\"\n", opts.System)
	}

	if opts.Template != "" {
		fmt.Fprintf(&mf, "TEMPLATE \"\"\"%s\"\"\"\n", opts.Template)
	}

	keys := make([]string, 0)
	for k := range opts.Options {
		keys = append(keys, k)
	}
	sort.Strings(keys)
	for _, k := range keys {
		fmt.Fprintf(&mf, "PARAMETER %s %v\n", k, opts.Options[k])
	}
	fmt.Fprintln(&mf)

	for _, msg := range opts.Messages {
		fmt.Fprintf(&mf, "MESSAGE %s \"\"\"%s\"\"\"\n", msg.Role, msg.Content)
	}

	return mf.String()
}

func normalizeFilePath(fp string) string {
	// Define a map of escaped characters and their replacements
	replacements := map[string]string{
@@ -636,10 +500,10 @@ func extractFileData(input string) (string, []api.ImageData, error) {
			if os.IsNotExist(err) {
				continue
			}
			fmt.Fprintf(os.Stderr, "Couldn't process image: %q\n", err)
			fmt.Printf("Couldn't process image: %q\n", err)
			return "", imgs, err
		}
		fmt.Fprintf(os.Stderr, "Added image '%s'\n", nfp)
		fmt.Printf("Added image '%s'\n", nfp)
		input = strings.ReplaceAll(input, fp, "")
		imgs = append(imgs, data)
	}
@@ -660,7 +524,7 @@ func getImageData(filePath string) ([]byte, error) {
	}

	contentType := http.DetectContentType(buf)
	allowedTypes := []string{"image/jpeg", "image/jpg", "image/png"}
	allowedTypes := []string{"image/jpeg", "image/jpg", "image/svg+xml", "image/png"}
	if !slices.Contains(allowedTypes, contentType) {
		return nil, fmt.Errorf("invalid image type: %s", contentType)
	}
@@ -1,13 +1,9 @@
package cmd

import (
	"bytes"
	"testing"
	"text/template"

	"github.com/stretchr/testify/assert"

	"github.com/ollama/ollama/api"
)

func TestExtractFilenames(t *testing.T) {
@@ -53,64 +49,3 @@ d:\path with\spaces\seven.svg inbetween7 c:\users\jdoe\eight.png inbetween8
	assert.Contains(t, res[9], "ten.svg")
	assert.Contains(t, res[9], "E:")
}

func TestModelfileBuilder(t *testing.T) {
	opts := runOptions{
		Model:    "hork",
		System:   "You are part horse and part shark, but all hork. Do horklike things",
		Template: "This is a template.",
		Messages: []api.Message{
			{Role: "user", Content: "Hey there hork!"},
			{Role: "assistant", Content: "Yes it is true, I am half horse, half shark."},
		},
		Options: map[string]interface{}{},
	}

	opts.Options["temperature"] = 0.9
	opts.Options["seed"] = 42
	opts.Options["penalize_newline"] = false
	opts.Options["stop"] = []string{"hi", "there"}

	mf := buildModelfile(opts)
	expectedModelfile := `FROM {{.Model}}
SYSTEM """{{.System}}"""
TEMPLATE """{{.Template}}"""
PARAMETER penalize_newline false
PARAMETER seed 42
PARAMETER stop [hi there]
PARAMETER temperature 0.9

MESSAGE user """Hey there hork!"""
MESSAGE assistant """Yes it is true, I am half horse, half shark."""
`

	tmpl, err := template.New("").Parse(expectedModelfile)
	assert.Nil(t, err)

	var buf bytes.Buffer
	err = tmpl.Execute(&buf, opts)
	assert.Nil(t, err)
	assert.Equal(t, buf.String(), mf)

	opts.ParentModel = "horseshark"
	mf = buildModelfile(opts)
	expectedModelfile = `FROM {{.ParentModel}}
SYSTEM """{{.System}}"""
TEMPLATE """{{.Template}}"""
PARAMETER penalize_newline false
PARAMETER seed 42
PARAMETER stop [hi there]
PARAMETER temperature 0.9

MESSAGE user """Hey there hork!"""
MESSAGE assistant """Yes it is true, I am half horse, half shark."""
`

	tmpl, err = template.New("").Parse(expectedModelfile)
	assert.Nil(t, err)

	var parentBuf bytes.Buffer
	err = tmpl.Execute(&parentBuf, opts)
	assert.Nil(t, err)
	assert.Equal(t, parentBuf.String(), mf)
}
@@ -1,30 +0,0 @@
package cmd

import (
	"context"
	"fmt"
	"os"
	"os/exec"
	"strings"

	"github.com/ollama/ollama/api"
)

func startApp(ctx context.Context, client *api.Client) error {
	exe, err := os.Executable()
	if err != nil {
		return err
	}
	link, err := os.Readlink(exe)
	if err != nil {
		return err
	}
	if !strings.Contains(link, "Ollama.app") {
		return fmt.Errorf("could not find ollama app")
	}
	path := strings.Split(link, "Ollama.app")
	if err := exec.Command("/usr/bin/open", "-a", path[0]+"Ollama.app").Run(); err != nil {
		return err
	}
	return waitForServer(ctx, client)
}
@@ -1,14 +0,0 @@
//go:build !windows && !darwin

package cmd

import (
	"context"
	"fmt"

	"github.com/ollama/ollama/api"
)

func startApp(ctx context.Context, client *api.Client) error {
	return fmt.Errorf("could not connect to ollama server, run 'ollama serve' to start it")
}
58
cmd/start_windows.go
@@ -1,58 +0,0 @@
package cmd

import (
	"context"
	"errors"
	"fmt"
	"os"
	"os/exec"
	"path/filepath"
	"strings"
	"syscall"

	"github.com/ollama/ollama/api"
)

func startApp(ctx context.Context, client *api.Client) error {
	appName := "ollama app.exe"
	exe, err := os.Executable()
	if err != nil {
		return err
	}
	appExe := filepath.Join(filepath.Dir(exe), appName)
	_, err = os.Stat(appExe)
	if errors.Is(err, os.ErrNotExist) {
		// Try the standard install location
		localAppData := os.Getenv("LOCALAPPDATA")
		appExe = filepath.Join(localAppData, "Ollama", appName)
		_, err := os.Stat(appExe)
		if errors.Is(err, os.ErrNotExist) {
			// Finally look in the path
			appExe, err = exec.LookPath(appName)
			if err != nil {
				return fmt.Errorf("could not locate ollama app")
			}
		}
	}

	cmdPath := "c:\\Windows\\system32\\cmd.exe"
	cmd := exec.Command(cmdPath, "/c", appExe)
	// TODO - these hide flags aren't working - still pops up a command window for some reason
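	// CreationFlags 0x08000000 is CREATE_NO_WINDOW; HideWindow only hides
	// windows created via STARTUPINFO, so a console can still appear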
	cmd.SysProcAttr = &syscall.SysProcAttr{CreationFlags: 0x08000000, HideWindow: true}

	// TODO this didn't help either...
	cmd.Stdin = strings.NewReader("")
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr

	if err := cmd.Start(); err != nil {
		return fmt.Errorf("unable to start ollama app: %w", err)
	}

	if cmd.Process != nil {
		defer cmd.Process.Release() //nolint:errcheck
	}
	return waitForServer(ctx, client)
}
200
convert/convert.go
@@ -1,200 +0,0 @@
package convert

import (
	"cmp"
	"encoding/binary"
	"encoding/json"
	"fmt"
	"io"
	"log/slog"
	"os"
	"path/filepath"
	"slices"
	"strings"

	"google.golang.org/protobuf/proto"

	"github.com/ollama/ollama/convert/sentencepiece"
	"github.com/ollama/ollama/llm"
)

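// Token types mirror llama.cpp's llama_token_type enum, where normal
// tokens start at 1 (0 is undefined).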
const (
	_ int32 = iota
	tokenTypeNormal
	tokenTypeUnknown
	tokenTypeControl
	tokenTypeUserDefined
	tokenTypeUnused
	tokenTypeByte
)

type Params struct {
	Architectures     []string `json:"architectures"`
	VocabSize         int      `json:"vocab_size"`
	HiddenSize        int      `json:"hidden_size"`       // n_embd
	HiddenLayers      int      `json:"num_hidden_layers"` // n_layer
	ContextSize       int      `json:"max_position_embeddings"`
	IntermediateSize  int      `json:"intermediate_size"`
	AttentionHeads    int      `json:"num_attention_heads"` // n_head
	KeyValHeads       int      `json:"num_key_value_heads"`
	NormEPS           float64  `json:"rms_norm_eps"`
	BoSTokenID        int      `json:"bos_token_id"`
	EoSTokenID        int      `json:"eos_token_id"`
	HeadDimension     int      `json:"head_dim"`
	PaddingTokenID    int      `json:"pad_token_id"`
	RopeFrequencyBase float64  `json:"rope_theta"`

	Experts     int `json:"num_local_experts"`
	ExpertsUsed int `json:"num_experts_per_tok"`

	PreTokenizer string

	ByteOrder
}

type ByteOrder interface {
	binary.ByteOrder
	binary.AppendByteOrder
}

type ModelArch interface {
	GetTensors() error
	LoadVocab() error
	WriteGGUF(io.WriteSeeker) error
}

type ModelFormat interface {
	GetLayerName(string) (string, error)
	GetTensors(string, *Params) ([]llm.Tensor, error)
	GetParams(string) (*Params, error)
	GetModelArch(string, string, *Params) (ModelArch, error)
}

type ModelData struct {
	Path    string
	Name    string
	Params  *Params
	Vocab   *Vocab
	Tensors []llm.Tensor
	Format  ModelFormat
}

func GetModelFormat(dirname string) (ModelFormat, error) {
	files, err := filepath.Glob(filepath.Join(dirname, "*"))
	if err != nil {
		return nil, err
	}

	for _, fn := range files {
		if strings.HasSuffix(fn, ".safetensors") {
			return &SafetensorFormat{}, nil
		} else if strings.HasSuffix(fn, ".bin") || strings.HasSuffix(fn, ".pth") {
			slog.Debug("model is torch")
			return &TorchFormat{}, nil
		}
	}

	return nil, fmt.Errorf("couldn't determine model format")
}

// Details on gguf's tokenizer can be found at:
// https://github.com/ggerganov/ggml/blob/master/docs/gguf.md#tokenizer
type Vocab struct {
	Tokens []string
	Scores []float32
	Types  []int32
	Merges []string
}

func LoadSentencePieceTokens(dirpath string, params *Params) (*Vocab, error) {
	slog.Info(fmt.Sprintf("reading vocab from %s", filepath.Join(dirpath, "tokenizer.model")))
	in, err := os.ReadFile(filepath.Join(dirpath, "tokenizer.model"))
	if err != nil {
		return nil, err
	}

	// To regenerate sentencepiece from the protobufs use:
	// protoc -I=./ --go_out=./ sentencepiece_model.proto
	modelProto := &sentencepiece.ModelProto{}
	if err := proto.Unmarshal(in, modelProto); err != nil {
		return nil, err
	}

	v := &Vocab{
		Tokens: make([]string, 0),
		Scores: make([]float32, 0),
		Types:  make([]int32, 0),
	}

	pieces := modelProto.GetPieces()
	for _, p := range pieces {
		v.Tokens = append(v.Tokens, p.GetPiece())
		v.Scores = append(v.Scores, p.GetScore())
		t := p.GetType()
		switch t {
		case sentencepiece.ModelProto_SentencePiece_UNKNOWN:
		case sentencepiece.ModelProto_SentencePiece_CONTROL:
		case sentencepiece.ModelProto_SentencePiece_UNUSED:
		case sentencepiece.ModelProto_SentencePiece_BYTE:
		default:
			t = sentencepiece.ModelProto_SentencePiece_NORMAL
		}
		v.Types = append(v.Types, int32(t))
	}

	slog.Info(fmt.Sprintf("vocab size: %d", len(v.Tokens)))

	// add any additional tokens
	addIn, err := os.ReadFile(filepath.Join(dirpath, "added_tokens.json"))
	if os.IsNotExist(err) {
		return v, nil
	} else if err != nil {
		return nil, err
	}

	slog.Info("reading user defined tokens")

	var extraTokenData map[string]int
	if err := json.Unmarshal(addIn, &extraTokenData); err != nil {
		return nil, err
	}

	type token struct {
		key string
		pos int
	}

	extraTokens := make([]token, 0)
	for k, id := range extraTokenData {
		extraTokens = append(extraTokens, token{k, id})
	}

	slices.SortFunc(extraTokens, func(a, b token) int {
		return cmp.Compare(a.pos, b.pos)
	})

	numToks := len(v.Tokens)

	for cnt, t := range extraTokens {
		// the token id should match the specific index for the total number of tokens
		if t.pos != cnt+numToks {
			return nil, fmt.Errorf("token ID '%d' for '%s' doesn't match total token size", t.pos, t.key)
		}
		v.Tokens = append(v.Tokens, t.key)
		v.Scores = append(v.Scores, -1000.0)
		v.Types = append(v.Types, tokenTypeUserDefined)
	}
	slog.Info(fmt.Sprintf("vocab size w/ extra tokens: %d", len(v.Tokens)))

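	// pad the vocab with placeholder tokens so its length matches the
	// vocab_size declared in config.json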
	if params.VocabSize > len(v.Tokens) {
		missingTokens := params.VocabSize - len(v.Tokens)
		slog.Warn(fmt.Sprintf("vocab is missing %d tokens", missingTokens))
		for cnt := 0; cnt < missingTokens; cnt++ {
			v.Tokens = append(v.Tokens, fmt.Sprintf("<dummy%05d>", cnt+1))
			v.Scores = append(v.Scores, -1)
			v.Types = append(v.Types, tokenTypeUserDefined)
		}
	}

	return v, nil
}
103
convert/convert_test.go
@@ -1,103 +0,0 @@
//go:build slow

package convert

import (
	"os"
	"path/filepath"
	"testing"

	"github.com/ollama/ollama/llm"
)

func convertFull(t *testing.T, p string) (llm.KV, llm.Tensors) {
	t.Helper()

	mf, err := GetModelFormat(p)
	if err != nil {
		t.Fatal(err)
	}

	params, err := mf.GetParams(p)
	if err != nil {
		t.Fatal(err)
	}

	arch, err := mf.GetModelArch("", p, params)
	if err != nil {
		t.Fatal(err)
	}

	if err := arch.LoadVocab(); err != nil {
		t.Fatal(err)
	}

	if err := arch.GetTensors(); err != nil {
		t.Fatal(err)
	}

	f, err := os.CreateTemp(t.TempDir(), "f16")
	if err != nil {
		t.Fatal(err)
	}
	defer f.Close()

	if err := arch.WriteGGUF(f); err != nil {
		t.Fatal(err)
	}

	r, err := os.Open(f.Name())
	if err != nil {
		t.Fatal(err)
	}
	defer r.Close()

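	// round-trip: decode the GGUF that was just written so the test can
	// validate the converted metadata and tensors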
	m, _, err := llm.DecodeGGML(r)
	if err != nil {
		t.Fatal(err)
	}

	return m.KV(), m.Tensors()
}

func TestConvertFull(t *testing.T) {
	cases := []struct {
		path    string
		arch    string
		tensors int
		layers  int
	}{
		{"Meta-Llama-3-8B-Instruct", "llama", 291, 35},
		{"Mistral-7B-Instruct-v0.2", "llama", 291, 35},
		{"Mixtral-8x7B-Instruct-v0.1", "llama", 291, 35},
		{"gemma-2b-it", "gemma", 164, 20},
	}

	for _, tt := range cases {
		t.Run(tt.path, func(t *testing.T) {
			p := filepath.Join("testdata", tt.path)
			if _, err := os.Stat(p); err != nil {
				t.Skipf("%s not found", p)
			}

			kv, tensors := convertFull(t, p)

			if kv.Architecture() != tt.arch {
				t.Fatalf("expected %s, got %s", tt.arch, kv.Architecture())
			}

			if kv.FileType().String() != "F16" {
				t.Fatalf("expected F16, got %s", kv.FileType())
			}

			if len(tensors) != tt.tensors {
				t.Fatalf("expected %d tensors, got %d", tt.tensors, len(tensors))
			}

			layers := tensors.Layers()
			if len(layers) != tt.layers {
				t.Fatalf("expected %d layers, got %d", tt.layers, len(layers))
			}
		})
	}
}
103
convert/gemma.go
@@ -1,103 +0,0 @@
package convert

import (
	"fmt"
	"io"
	"log/slog"
	"strings"

	"github.com/pdevine/tensor"
	"github.com/pdevine/tensor/native"

	"github.com/ollama/ollama/llm"
)

type GemmaModel struct {
	ModelData
}

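// Gemma checkpoints store RMSNorm weights offset by one (the model applies
// x * (1 + w)), so add ones back to get the absolute weights GGUF expects.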
func addOnes(data []float32, vectorSize int) ([]float32, error) {
	n := tensor.New(tensor.WithShape(vectorSize), tensor.WithBacking(data))
	ones := tensor.Ones(tensor.Float32, vectorSize)

	n, err := n.Add(ones)
	if err != nil {
		return nil, err
	}

	ts, err := native.SelectF32(n, 0)
	if err != nil {
		return nil, err
	}

	var f32s []float32
	for _, t := range ts {
		f32s = append(f32s, t...)
	}

	return f32s, nil
}

func (m *GemmaModel) GetTensors() error {
	t, err := m.Format.GetTensors(m.Path, m.Params)
	if err != nil {
		return err
	}

	slog.Debug(fmt.Sprintf("Total tensors: %d", len(t)))
	for _, l := range t {
		if strings.HasSuffix(l.Name, "norm.weight") {
			wt := l.WriterTo.(safetensorWriterTo)
			wt.repacker = m.Repack
			l.WriterTo = wt
		}
		m.Tensors = append(m.Tensors, l)
	}

	return nil
}

func (m *GemmaModel) LoadVocab() error {
	v, err := LoadSentencePieceTokens(m.Path, m.Params)
	if err != nil {
		return err
	}
	m.Vocab = v
	return nil
}

func (m *GemmaModel) Repack(_ string, data []float32, shape []uint64) ([]float32, error) {
	return addOnes(data, int(shape[0]))
}

func (m *GemmaModel) WriteGGUF(ws io.WriteSeeker) error {
	kv := llm.KV{
		"general.architecture":                   "gemma",
		"general.name":                           m.Name,
		"gemma.context_length":                   uint32(m.Params.ContextSize),
		"gemma.embedding_length":                 uint32(m.Params.HiddenSize),
		"gemma.block_count":                      uint32(m.Params.HiddenLayers),
		"gemma.feed_forward_length":              uint32(m.Params.IntermediateSize),
		"gemma.attention.head_count":             uint32(m.Params.AttentionHeads),
		"gemma.attention.head_count_kv":          uint32(m.Params.KeyValHeads),
		"gemma.attention.layer_norm_rms_epsilon": float32(m.Params.NormEPS),
		"gemma.attention.key_length":             uint32(m.Params.HeadDimension),
		"gemma.attention.value_length":           uint32(m.Params.HeadDimension),
		"general.file_type":                      uint32(1),
		"tokenizer.ggml.model":                   "llama",

		"tokenizer.ggml.tokens":     m.Vocab.Tokens,
		"tokenizer.ggml.scores":     m.Vocab.Scores,
		"tokenizer.ggml.token_type": m.Vocab.Types,

		"tokenizer.ggml.bos_token_id":     uint32(m.Params.BoSTokenID),
		"tokenizer.ggml.eos_token_id":     uint32(m.Params.EoSTokenID),
		"tokenizer.ggml.padding_token_id": uint32(m.Params.PaddingTokenID),
		"tokenizer.ggml.unknown_token_id": uint32(3),
		"tokenizer.ggml.add_bos_token":    true,
		"tokenizer.ggml.add_eos_token":    false,
	}

	return llm.NewGGUFV3(m.Params.ByteOrder).Encode(ws, kv, m.Tensors)
}
158
convert/llama.go
@@ -1,158 +0,0 @@
package convert

import (
	"cmp"
	"errors"
	"fmt"
	"io"
	"os"
	"path/filepath"
	"regexp"
	"strings"

	"github.com/pdevine/tensor"
	"github.com/pdevine/tensor/native"

	"github.com/ollama/ollama/llm"
)

type LlamaModel struct {
	ModelData
}

func (m *LlamaModel) GetTensors() error {
	t, err := m.Format.GetTensors(m.Path, m.Params)
	if err != nil {
		return err
	}

	pattern := `^blk\.[0-9]+\.attn_(?P<layer>q|k)\.weight$`
	re, err := regexp.Compile(pattern)
	if err != nil {
		return err
	}

	for _, l := range t {
		matches := re.FindAllStringSubmatch(l.Name, -1)
		if len(matches) > 0 {
			switch m.Format.(type) {
			case *TorchFormat:
				wt := l.WriterTo.(torchWriterTo)
				wt.repacker = m.Repack
				l.WriterTo = wt
			case *SafetensorFormat:
				wt := l.WriterTo.(safetensorWriterTo)
				wt.repacker = m.Repack
				l.WriterTo = wt
			}
		}
		m.Tensors = append(m.Tensors, l)
	}

	return nil
}

func (m *LlamaModel) LoadVocab() (err error) {
	pre, ts, merges, err := parseTokens(filepath.Join(m.Path, "tokenizer.json"))
	if errors.Is(err, os.ErrNotExist) {
		return nil
	} else if err != nil {
		return err
	}

	m.Vocab = &Vocab{}
	for _, t := range ts {
		m.Vocab.Tokens = append(m.Vocab.Tokens, t.Content)
		m.Vocab.Types = append(m.Vocab.Types, t.Type())
	}

	m.Vocab.Merges = merges
	m.Params.PreTokenizer = pre
	return nil
}

func (m *LlamaModel) WriteGGUF(ws io.WriteSeeker) error {
	kv := llm.KV{
		"general.architecture":                   "llama",
		"general.name":                           m.Name,
		"llama.vocab_size":                       uint32(len(m.Vocab.Tokens)),
		"llama.context_length":                   uint32(m.Params.ContextSize),
		"llama.embedding_length":                 uint32(m.Params.HiddenSize),
		"llama.block_count":                      uint32(m.Params.HiddenLayers),
		"llama.feed_forward_length":              uint32(m.Params.IntermediateSize),
		"llama.rope.freq_base":                   float32(m.Params.RopeFrequencyBase),
		"llama.rope.dimension_count":             uint32(m.Params.HiddenSize / m.Params.AttentionHeads),
		"llama.attention.head_count":             uint32(m.Params.AttentionHeads),
		"llama.attention.head_count_kv":          uint32(m.Params.KeyValHeads),
		"llama.attention.layer_norm_rms_epsilon": float32(m.Params.NormEPS),
		"general.file_type":                      uint32(1),
		"tokenizer.ggml.model":                   "gpt2",

		"tokenizer.ggml.pre":        m.Params.PreTokenizer,
		"tokenizer.ggml.tokens":     m.Vocab.Tokens,
		"tokenizer.ggml.token_type": m.Vocab.Types,

		"tokenizer.ggml.bos_token_id":     uint32(m.Params.BoSTokenID),
		"tokenizer.ggml.eos_token_id":     uint32(m.Params.EoSTokenID),
		"tokenizer.ggml.unknown_token_id": uint32(0),
	}

	if len(m.Vocab.Merges) > 0 {
		kv["tokenizer.ggml.merges"] = m.Vocab.Merges
	} else {
		kv["tokenizer.ggml.scores"] = m.Vocab.Scores
	}

	return llm.NewGGUFV3(m.Params.ByteOrder).Encode(ws, kv, m.Tensors)
}

func (m *LlamaModel) Repack(name string, data []float32, shape []uint64) ([]float32, error) {
	return llamaRepack(name, m.Params, data, shape)
}

func llamaRepack(name string, params *Params, data []float32, shape []uint64) ([]float32, error) {
	var dims []int
	for _, dim := range shape {
		if dim != 0 {
			dims = append(dims, int(dim))
		}
	}

	var heads int
	if strings.HasSuffix(name, "attn_q.weight") {
		heads = params.AttentionHeads
	} else if strings.HasSuffix(name, "attn_k.weight") {
		heads = cmp.Or(params.KeyValHeads, params.AttentionHeads)
	} else {
		return nil, fmt.Errorf("unknown tensor name: %s", name)
	}

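	// permute the rows of each head from the interleaved rotary-embedding
	// layout used by Hugging Face checkpoints to the half-split layout
	// that llama.cpp expects for attn_q/attn_k weights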
	n := tensor.New(tensor.WithShape(dims...), tensor.WithBacking(data))
	if err := n.Reshape(append([]int{heads, 2, dims[0] / heads / 2}, dims[1:]...)...); err != nil {
		return nil, err
	}

	if err := n.T(0, 2, 1, 3); err != nil {
		return nil, err
	}

	if err := n.Reshape(dims...); err != nil {
		return nil, err
	}

	if err := n.Transpose(); err != nil {
		return nil, err
	}

	ts, err := native.SelectF32(n, 1)
	if err != nil {
		return nil, err
	}

	var f32s []float32
	for _, t := range ts {
		f32s = append(f32s, t...)
	}

	return f32s, nil
}
79
convert/mistral.go
@@ -1,79 +0,0 @@
package convert

import (
	"io"
	"regexp"

	"github.com/ollama/ollama/llm"
)

type MistralModel struct {
	ModelData
}

func (m *MistralModel) GetTensors() error {
	t, err := m.Format.GetTensors(m.Path, m.Params)
	if err != nil {
		return err
	}

	pattern := `^blk\.[0-9]+\.attn_(?P<layer>q|k)\.weight$`
	re, err := regexp.Compile(pattern)
	if err != nil {
		return err
	}

	for _, l := range t {
		matches := re.FindAllStringSubmatch(l.Name, -1)
		if len(matches) > 0 {
			wt := l.WriterTo.(safetensorWriterTo)
			wt.repacker = m.Repack
			l.WriterTo = wt
		}
		m.Tensors = append(m.Tensors, l)
	}

	return nil
}

func (m *MistralModel) LoadVocab() error {
	v, err := LoadSentencePieceTokens(m.Path, m.Params)
	if err != nil {
		return err
	}
	m.Vocab = v
	return nil
}

func (m *MistralModel) WriteGGUF(ws io.WriteSeeker) error {
	kv := llm.KV{
		"general.architecture":                   "llama",
		"general.name":                           m.Name,
		"llama.context_length":                   uint32(m.Params.ContextSize),
		"llama.embedding_length":                 uint32(m.Params.HiddenSize),
		"llama.block_count":                      uint32(m.Params.HiddenLayers),
		"llama.feed_forward_length":              uint32(m.Params.IntermediateSize),
		"llama.rope.dimension_count":             uint32(m.Params.HiddenSize / m.Params.AttentionHeads),
		"llama.attention.head_count":             uint32(m.Params.AttentionHeads),
		"llama.attention.head_count_kv":          uint32(m.Params.KeyValHeads),
		"llama.attention.layer_norm_rms_epsilon": float32(m.Params.NormEPS),
		"general.file_type":                      uint32(1),
		"tokenizer.ggml.model":                   "llama",

		"tokenizer.ggml.tokens":     m.Vocab.Tokens,
		"tokenizer.ggml.scores":     m.Vocab.Scores,
		"tokenizer.ggml.token_type": m.Vocab.Types,

		"tokenizer.ggml.bos_token_id":     uint32(m.Params.BoSTokenID),
		"tokenizer.ggml.eos_token_id":     uint32(m.Params.EoSTokenID),
		"tokenizer.ggml.add_bos_token":    true,
		"tokenizer.ggml.add_eos_token":    false,
		"tokenizer.ggml.unknown_token_id": uint32(0),
	}

	return llm.NewGGUFV3(m.Params.ByteOrder).Encode(ws, kv, m.Tensors)
}

func (m *MistralModel) Repack(name string, data []float32, shape []uint64) ([]float32, error) {
	return llamaRepack(name, m.Params, data, shape)
}
87
convert/mixtral.go
@@ -1,87 +0,0 @@
package convert

import (
	"io"
	"regexp"

	"github.com/ollama/ollama/llm"
)

type MixtralModel struct {
	ModelData
}

func (m *MixtralModel) GetTensors() error {
	t, err := m.Format.GetTensors(m.Path, m.Params)
	if err != nil {
		return err
	}

	pattern := `^blk\.[0-9]+\.attn_(?P<layer>q|k)\.weight$`
	re, err := regexp.Compile(pattern)
	if err != nil {
		return err
	}

	for _, l := range t {
		matches := re.FindAllStringSubmatch(l.Name, -1)
		if len(matches) > 0 {
			wt := l.WriterTo.(safetensorWriterTo)
			wt.repacker = m.Repack
			l.WriterTo = wt
		}
		m.Tensors = append(m.Tensors, l)
	}

	return nil
}

func (m *MixtralModel) LoadVocab() error {
	v, err := LoadSentencePieceTokens(m.Path, m.Params)
	if err != nil {
		return err
	}
	m.Vocab = v
	return nil
}

func (m *MixtralModel) WriteGGUF(ws io.WriteSeeker) error {
	kv := llm.KV{
		"general.architecture":          "llama",
		"general.name":                  m.Name,
		"llama.block_count":             uint32(m.Params.HiddenLayers),
		"llama.context_length":          uint32(m.Params.ContextSize),
		"llama.embedding_length":        uint32(m.Params.HiddenSize),
		"llama.feed_forward_length":     uint32(m.Params.IntermediateSize),
		"llama.attention.head_count":    uint32(m.Params.AttentionHeads),
		"llama.attention.head_count_kv": uint32(m.Params.KeyValHeads),

		"llama.rope.freq_base":                   float32(m.Params.RopeFrequencyBase),
		"llama.attention.layer_norm_rms_epsilon": float32(m.Params.NormEPS),

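		// MoE metadata: total experts and experts routed per token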
"llama.expert_count": uint32(m.Params.Experts),
|
||||
"llama.expert_used_count": uint32(m.Params.ExpertsUsed),
|
||||
|
||||
"llama.vocab_size": uint32(len(m.Vocab.Tokens)),
|
||||
"llama.rope.dimension_count": uint32(m.Params.HiddenSize / m.Params.AttentionHeads),
|
||||
|
||||
"general.file_type": uint32(1),
|
||||
"tokenizer.ggml.model": "llama",
|
||||
|
||||
"tokenizer.ggml.tokens": m.Vocab.Tokens,
|
||||
"tokenizer.ggml.scores": m.Vocab.Scores,
|
||||
"tokenizer.ggml.token_type": m.Vocab.Types,
|
||||
|
||||
"tokenizer.ggml.bos_token_id": uint32(m.Params.BoSTokenID),
|
||||
"tokenizer.ggml.eos_token_id": uint32(m.Params.EoSTokenID),
|
||||
"tokenizer.ggml.unknown_token_id": uint32(0),
|
||||
"tokenizer.ggml.add_bos_token": true,
|
||||
"tokenizer.ggml.add_eos_token": false,
|
||||
}
|
||||
|
||||
return llm.NewGGUFV3(m.Params.ByteOrder).Encode(ws, kv, m.Tensors)
|
||||
}
|
||||
|
||||
func (m *MixtralModel) Repack(name string, data []float32, shape []uint64) ([]float32, error) {
|
||||
return llamaRepack(name, m.Params, data, shape)
|
||||
}
|
309
convert/safetensors.go
@@ -1,309 +0,0 @@
package convert

import (
	"bytes"
	"encoding/binary"
	"encoding/json"
	"fmt"
	"io"
	"os"
	"path/filepath"
	"regexp"
	"slices"
	"strings"

	"github.com/d4l3k/go-bfloat16"
	"github.com/x448/float16"

	"github.com/ollama/ollama/llm"
)

type safetensorWriterTo struct {
	t *llm.Tensor

	params *Params
	bo     ByteOrder

	filename string
	dtype    string

	offset, size int64
	repacker     func(string, []float32, []uint64) ([]float32, error)
}

type safetensorMetadata struct {
	Type    string   `json:"dtype"`
	Shape   []uint64 `json:"shape"`
	Offsets []int64  `json:"data_offsets"`
}

type SafetensorFormat struct{}

func (m *SafetensorFormat) GetTensors(dirpath string, params *Params) ([]llm.Tensor, error) {
	var tensors []llm.Tensor
	matches, err := filepath.Glob(filepath.Join(dirpath, "*.safetensors"))
	if err != nil {
		return nil, err
	}

	var offset uint64
	for _, f := range matches {
		var t []llm.Tensor
		var err error
		t, offset, err = m.readTensors(f, offset, params)
		if err != nil {
			return nil, err
		}

		tensors = append(tensors, t...)
	}
	return tensors, nil
}

func (m *SafetensorFormat) readTensors(fn string, offset uint64, params *Params) ([]llm.Tensor, uint64, error) {
	f, err := os.Open(fn)
	if err != nil {
		return nil, 0, err
	}
	defer f.Close()

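	// a safetensors file starts with an 8-byte little-endian length of the
	// JSON header, followed by the header itself and then the raw tensor
	// data; the pad closure below turns header-relative offsets into file
	// offsets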
	var n int64
	if err := binary.Read(f, binary.LittleEndian, &n); err != nil {
		return nil, 0, err
	}

	b := bytes.NewBuffer(make([]byte, 0, n))
	if _, err = io.CopyN(b, f, n); err != nil {
		return nil, 0, err
	}

	var headers map[string]safetensorMetadata
	if err := json.NewDecoder(b).Decode(&headers); err != nil {
		return nil, 0, err
	}

	var keys []string
	for key := range headers {
		if !strings.HasSuffix(key, "self_attn.rotary_emb.inv_freq") {
			keys = append(keys, key)
		}
	}

	slices.Sort(keys)

	var tensors []llm.Tensor
	for _, key := range keys {
		value := headers[key]

		var kind uint32
		switch len(value.Shape) {
		case 0:
			// skip entries without a shape, e.g. the "__metadata__" header
			continue
		case 2:
			kind = 1
		}

		name, err := m.GetLayerName(key)
		if err != nil {
			return nil, 0, err
		}

		shape := make([]uint64, len(value.Shape))
		copy(shape, value.Shape)

		pad := func(s int64) int64 {
			return 8 + n + s
		}

		t := llm.Tensor{
			Name:   name,
			Kind:   kind,
			Offset: offset,
			Shape:  shape[:],
		}

		t.WriterTo = safetensorWriterTo{
			t:        &t,
			params:   params,
			bo:       params.ByteOrder,
			filename: fn,
			dtype:    value.Type,
			offset:   pad(value.Offsets[0]),
			size:     pad(value.Offsets[1]) - pad(value.Offsets[0]),
		}

		offset += t.Size()
		tensors = append(tensors, t)
	}

	return tensors, offset, nil
}

func (m *SafetensorFormat) GetParams(dirpath string) (*Params, error) {
	f, err := os.Open(filepath.Join(dirpath, "config.json"))
	if err != nil {
		return nil, err
	}
	defer f.Close()

	var params Params

	if err := json.NewDecoder(f).Decode(&params); err != nil {
		return nil, err
	}

	params.ByteOrder = binary.LittleEndian
	return &params, nil
}

func (m *SafetensorFormat) GetLayerName(n string) (string, error) {
	directMap := map[string]string{
		"model.embed_tokens.weight": "token_embd.weight",
		"lm_head.weight":            "output.weight",
		"model.norm.weight":         "output_norm.weight",
	}

	tMap := map[string]string{
		"model.layers.(\\d+).input_layernorm.weight":                    "blk.$1.attn_norm.weight",
		"model.layers.(\\d+).mlp.down_proj.weight":                      "blk.$1.ffn_down.weight",
		"model.layers.(\\d+).mlp.gate_proj.weight":                      "blk.$1.ffn_gate.weight",
		"model.layers.(\\d+).mlp.up_proj.weight":                        "blk.$1.ffn_up.weight",
		"model.layers.(\\d+).post_attention_layernorm.weight":           "blk.$1.ffn_norm.weight",
		"model.layers.(\\d+).self_attn.k_proj.weight":                   "blk.$1.attn_k.weight",
		"model.layers.(\\d+).self_attn.o_proj.weight":                   "blk.$1.attn_output.weight",
		"model.layers.(\\d+).self_attn.q_proj.weight":                   "blk.$1.attn_q.weight",
		"model.layers.(\\d+).self_attn.v_proj.weight":                   "blk.$1.attn_v.weight",
		"model.layers.(\\d+).block_sparse_moe.gate.weight":              "blk.$1.ffn_gate_inp.weight",
		"model.layers.(\\d+).block_sparse_moe.experts.(\\d+).w1.weight": "blk.$1.ffn_gate.$2.weight",
		"model.layers.(\\d+).block_sparse_moe.experts.(\\d+).w2.weight": "blk.$1.ffn_down.$2.weight",
		"model.layers.(\\d+).block_sparse_moe.experts.(\\d+).w3.weight": "blk.$1.ffn_up.$2.weight",
	}

	v, ok := directMap[n]
	if ok {
		return v, nil
	}

	// quick hack to rename the layers to gguf format
	for k, v := range tMap {
		re := regexp.MustCompile(k)
		newName := re.ReplaceAllString(n, v)
		if newName != n {
			return newName, nil
		}
	}

	return "", fmt.Errorf("couldn't find a layer name for '%s'", n)
}

func (r safetensorWriterTo) WriteTo(w io.Writer) (n int64, err error) {
	f, err := os.Open(r.filename)
	if err != nil {
		return 0, err
	}
	defer f.Close()

	if _, err = f.Seek(r.offset, io.SeekStart); err != nil {
		return 0, err
	}

	var f32s []float32
	switch r.dtype {
	case "F32":
		f32s = make([]float32, r.size/4)
		if err = binary.Read(f, r.bo, f32s); err != nil {
			return 0, err
		}
	case "F16":
		u16s := make([]uint16, r.size/2)
		if err = binary.Read(f, r.bo, u16s); err != nil {
			return 0, err
		}

		for _, b := range u16s {
			f32s = append(f32s, float16.Frombits(b).Float32())
		}

	case "BF16":
		u8s := make([]uint8, r.size)
		if err = binary.Read(f, r.bo, u8s); err != nil {
			return 0, err
		}

		f32s = bfloat16.DecodeFloat32(u8s)
	default:
		return 0, fmt.Errorf("unknown data type: %s", r.dtype)
	}

	if r.repacker != nil {
		f32s, err = r.repacker(r.t.Name, f32s, r.t.Shape)
		if err != nil {
			return 0, err
		}
	}

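	// GGUF tensor kinds: 0 is F32, 1 is F16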
	switch r.t.Kind {
	case 0:
		return 0, binary.Write(w, r.bo, f32s)
	case 1:
		f16s := make([]uint16, len(f32s))
		for i := range f32s {
			f16s[i] = float16.Fromfloat32(f32s[i]).Bits()
		}

		return 0, binary.Write(w, r.bo, f16s)
	default:
		return 0, fmt.Errorf("unknown storage type: %d", r.t.Kind)
	}
}

func (m *SafetensorFormat) GetModelArch(name, dirPath string, params *Params) (ModelArch, error) {
	switch len(params.Architectures) {
	case 0:
		return nil, fmt.Errorf("no architecture specified to convert")
	case 1:
		switch params.Architectures[0] {
		case "LlamaForCausalLM":
			return &LlamaModel{
				ModelData{
					Name:   name,
					Path:   dirPath,
					Params: params,
					Format: m,
				},
			}, nil
		case "MistralForCausalLM":
			return &MistralModel{
				ModelData{
					Name:   name,
					Path:   dirPath,
					Params: params,
					Format: m,
				},
			}, nil
		case "MixtralForCausalLM":
			return &MixtralModel{
				ModelData{
					Name:   name,
					Path:   dirPath,
					Params: params,
					Format: m,
				},
			}, nil
		case "GemmaForCausalLM":
			return &GemmaModel{
				ModelData{
					Name:   name,
					Path:   dirPath,
					Params: params,
					Format: m,
				},
			}, nil
		default:
			return nil, fmt.Errorf("models based on '%s' are not yet supported", params.Architectures[0])
		}
	}

	return nil, fmt.Errorf("unsupported number of architectures: %d", len(params.Architectures))
}