Compare commits
6 Commits
v0.4.7
...
brucemacd/
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a5bc4b7c17 | ||
|
|
1be080403d | ||
|
|
55c3efa900 | ||
|
|
1aedffad93 | ||
|
|
ff6c2d6dc8 | ||
|
|
d543b282a7 |
@@ -359,6 +359,7 @@ See the [API documentation](./docs/api.md) for all endpoints.
|
||||
- [Nosia](https://github.com/nosia-ai/nosia) (Easy to install and use RAG platform based on Ollama)
|
||||
- [Witsy](https://github.com/nbonamy/witsy) (An AI Desktop application available for Mac/Windows/Linux)
|
||||
- [Abbey](https://github.com/US-Artificial-Intelligence/abbey) (A configurable AI interface server with notebooks, document storage, and YouTube support)
|
||||
- [Minima](https://github.com/dmayboroda/minima) (RAG with on-premises or fully local workflow)
|
||||
|
||||
### Cloud
|
||||
|
||||
|
||||
@@ -8,7 +8,6 @@ import (
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
@@ -180,18 +179,14 @@ Weigh anchor!
|
||||
|
||||
t.Run("license", func(t *testing.T) {
|
||||
var b bytes.Buffer
|
||||
license, err := os.ReadFile(filepath.Join("..", "LICENSE"))
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
license := "MIT License\nCopyright (c) Ollama\n"
|
||||
if err := showInfo(&api.ShowResponse{
|
||||
Details: api.ModelDetails{
|
||||
Family: "test",
|
||||
ParameterSize: "7B",
|
||||
QuantizationLevel: "FP16",
|
||||
},
|
||||
License: string(license),
|
||||
License: license,
|
||||
}, &b); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
@@ -49,10 +49,10 @@ Advanced parameters (optional):
|
||||
- `options`: additional model parameters listed in the documentation for the [Modelfile](./modelfile.md#valid-parameters-and-values) such as `temperature`
|
||||
- `system`: system message (overrides what is defined in the `Modelfile`)
|
||||
- `template`: the prompt template to use (overrides what is defined in the `Modelfile`)
|
||||
- `context`: the context parameter returned from a previous request to `/generate`, this can be used to keep a short conversational memory
|
||||
- `stream`: if `false` the response will be returned as a single response object, rather than a stream of objects
|
||||
- `raw`: if `true` no formatting will be applied to the prompt. You may choose to use the `raw` parameter if you are specifying a full templated prompt in your request to the API
|
||||
- `keep_alive`: controls how long the model will stay loaded into memory following the request (default: `5m`)
|
||||
- `context` (deprecated): the context parameter returned from a previous request to `/generate`, this can be used to keep a short conversational memory
|
||||
|
||||
#### JSON mode
|
||||
|
||||
|
||||
@@ -63,7 +63,7 @@ SYSTEM You are Mario from super mario bros, acting as an assistant.
|
||||
To use this:
|
||||
|
||||
1. Save it as a file (e.g. `Modelfile`)
|
||||
2. `ollama create choose-a-model-name -f <location of the file e.g. ./Modelfile>'`
|
||||
2. `ollama create choose-a-model-name -f <location of the file e.g. ./Modelfile>`
|
||||
3. `ollama run choose-a-model-name`
|
||||
4. Start using the model!
|
||||
|
||||
|
||||
@@ -802,6 +802,12 @@ func PushModel(ctx context.Context, name string, regOpts *registryOptions, fn fu
|
||||
if mp.ProtocolScheme == "http" && !regOpts.Insecure {
|
||||
return errors.New("insecure protocol http")
|
||||
}
|
||||
if mp.Namespace != strings.ToLower(mp.Namespace) {
|
||||
return fmt.Errorf("namespace must be lowercase, but is %s", mp.Namespace)
|
||||
}
|
||||
if mp.Repository != strings.ToLower(mp.Repository) {
|
||||
return fmt.Errorf("model name must be lowercase, but is %s", mp.Repository)
|
||||
}
|
||||
|
||||
manifest, _, err := GetManifest(mp)
|
||||
if err != nil {
|
||||
|
||||
50
server/images_test.go
Normal file
50
server/images_test.go
Normal file
@@ -0,0 +1,50 @@
|
||||
package server
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/ollama/ollama/api"
|
||||
)
|
||||
|
||||
func TestPushModel(t *testing.T) {
|
||||
noOpProgress := func(resp api.ProgressResponse) {}
|
||||
|
||||
tests := []struct {
|
||||
modelStr string
|
||||
regOpts *registryOptions
|
||||
wantErr string
|
||||
}{
|
||||
{
|
||||
modelStr: "http://example.com/namespace/repo:tag",
|
||||
regOpts: ®istryOptions{Insecure: false},
|
||||
wantErr: "insecure protocol http",
|
||||
},
|
||||
{
|
||||
modelStr: "docker://Example/repo:tag",
|
||||
regOpts: ®istryOptions{},
|
||||
wantErr: "namespace must be lowercase, but is Example",
|
||||
},
|
||||
{
|
||||
modelStr: "docker://example/Repo:tag",
|
||||
regOpts: ®istryOptions{},
|
||||
wantErr: "model name must be lowercase, but is Repo",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.modelStr, func(t *testing.T) {
|
||||
err := PushModel(context.Background(), tt.modelStr, tt.regOpts, noOpProgress)
|
||||
|
||||
if tt.wantErr != "" {
|
||||
if err == nil {
|
||||
t.Errorf("PushModel() error = %v, wantErr %v", err, tt.wantErr)
|
||||
} else if !strings.Contains(err.Error(), tt.wantErr) {
|
||||
t.Errorf("PushModel() error = %v, wantErr %v", err, tt.wantErr)
|
||||
}
|
||||
return
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -251,6 +251,7 @@ func (s *Server) GenerateHandler(c *gin.Context) {
|
||||
|
||||
var b bytes.Buffer
|
||||
if req.Context != nil {
|
||||
slog.Warn("the context field is deprecated and will be removed in a future version of Ollama")
|
||||
s, err := r.Detokenize(c.Request.Context(), req.Context)
|
||||
if err != nil {
|
||||
c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
|
||||
|
||||
Reference in New Issue
Block a user