diff --git a/llama/llama.go b/llama/llama.go index 6eed3d477..9add38c2c 100644 --- a/llama/llama.go +++ b/llama/llama.go @@ -37,23 +37,36 @@ COMPILER inline get_compiler() { import "C" import ( + "context" _ "embed" "errors" "fmt" + "log/slog" "os" "runtime" "runtime/cgo" "slices" "strings" - "sync/atomic" "unsafe" _ "github.com/ollama/ollama/llama/llama.cpp/common" _ "github.com/ollama/ollama/llama/llama.cpp/examples/llava" _ "github.com/ollama/ollama/llama/llama.cpp/src" - "github.com/ollama/ollama/ml/backend/ggml/ggml/src" + ggml "github.com/ollama/ollama/ml/backend/ggml/ggml/src" ) +func init() { + C.llama_log_set(C.ggml_log_callback(C.llamaLog), nil) +} + +//export llamaLog +func llamaLog(level C.int, text *C.char, _ unsafe.Pointer) { + // slog levels are multiples of 4, with INFO at zero + if slog.Default().Enabled(context.TODO(), slog.Level(int(level-C.GGML_LOG_LEVEL_INFO)*4)) { + fmt.Fprint(os.Stderr, C.GoString(text)) + } +} + func BackendInit() { ggml.OnceLoad() C.llama_backend_init() @@ -72,26 +85,6 @@ func PrintSystemInfo() string { return C.GoString(C.llama_print_system_info()) + compiler } -var logLevel atomic.Int32 - -func init() { - logLevel.Store(int32(C.GGML_LOG_LEVEL_INFO)) - C.llama_log_set((C.ggml_log_callback)(C.llamaLog), nil) -} - -func EnableDebug() { - logLevel.Store(int32(C.GGML_LOG_LEVEL_DEBUG)) -} - -//export llamaLog -func llamaLog(level int32, text *C.char, _ unsafe.Pointer) { - if level < logLevel.Load() { - return - } - - fmt.Fprint(os.Stderr, C.GoString(text)) -} - func GetModelArch(modelPath string) (string, error) { mp := C.CString(modelPath) defer C.free(unsafe.Pointer(mp)) diff --git a/ml/backend/ggml/ggml/src/ggml.go b/ml/backend/ggml/ggml/src/ggml.go index 3920e37dc..85c693eba 100644 --- a/ml/backend/ggml/ggml/src/ggml.go +++ b/ml/backend/ggml/ggml/src/ggml.go @@ -10,6 +10,8 @@ package ggml import "C" import ( + "context" + "fmt" "log/slog" "os" "path/filepath" @@ -22,21 +24,14 @@ import ( ) func init() { - 
C.ggml_log_set((C.ggml_log_callback)(C.sink), nil) + C.ggml_log_set(C.ggml_log_callback(C.sink), nil) } //export sink func sink(level C.int, text *C.char, _ unsafe.Pointer) { - msg := strings.TrimSpace(C.GoString(text)) - switch level { - case C.GGML_LOG_LEVEL_DEBUG: - slog.Debug(msg) - case C.GGML_LOG_LEVEL_INFO: - slog.Info(msg) - case C.GGML_LOG_LEVEL_WARN: - slog.Warn(msg) - case C.GGML_LOG_LEVEL_ERROR: - slog.Error(msg) + // slog levels are multiples of 4, with INFO at zero + if slog.Default().Enabled(context.TODO(), slog.Level(int(level-C.GGML_LOG_LEVEL_INFO)*4)) { + fmt.Fprint(os.Stderr, C.GoString(text)) } } diff --git a/runner/llamarunner/runner.go b/runner/llamarunner/runner.go index 72873ec4d..f9d204015 100644 --- a/runner/llamarunner/runner.go +++ b/runner/llamarunner/runner.go @@ -915,7 +915,6 @@ func Execute(args []string) error { level := slog.LevelInfo if *verbose { level = slog.LevelDebug - llama.EnableDebug() } handler := slog.NewTextHandler(os.Stderr, &slog.HandlerOptions{ Level: level,