diff --git a/llm/server.go b/llm/server.go
index 528af71f9..7172d9240 100644
--- a/llm/server.go
+++ b/llm/server.go
@@ -325,7 +325,9 @@ func NewLlamaServer(gpus discover.GpuInfoList, modelPath string, f *ggml.GGML, a
 		pathEnv = "LD_LIBRARY_PATH"
 	}
 
-	var libraryPaths []string
+	// Note: we always put our dependency paths first
+	// since these are the exact version we compiled/linked against
+	libraryPaths := []string{discover.LibOllamaPath}
 	if libraryPath, ok := os.LookupEnv(pathEnv); ok {
 		libraryPaths = append(libraryPaths, filepath.SplitList(libraryPath)...)
 	}
@@ -335,13 +337,11 @@ func NewLlamaServer(gpus discover.GpuInfoList, modelPath string, f *ggml.GGML, a
 		c := compatible[0]
 		if libpath, ok := libs[c]; ok {
 			slog.Debug("adding gpu library", "path", libpath)
-			libraryPaths = append(libraryPaths, libpath)
+			libraryPaths = append([]string{libpath}, libraryPaths...)
 			ggmlPaths = append(ggmlPaths, libpath)
 		}
 	}
 
-	// Note: we always put the dependency path first
-	// since this was the exact version we compiled/linked against
 	if gpus[0].DependencyPath != nil {
 		slog.Debug("adding gpu dependency paths", "paths", gpus[0].DependencyPath)
 		// assume gpus from the same library have the same dependency path
diff --git a/scripts/build_windows.ps1 b/scripts/build_windows.ps1
index 60485df85..e4c0b3d93 100644
--- a/scripts/build_windows.ps1
+++ b/scripts/build_windows.ps1
@@ -121,7 +121,7 @@ function buildOllama() {
         if ($env:HIP_PATH) {
             write-host "Building ROCm backend libraries"
             if (-Not (get-command -ErrorAction silent ninja)) {
-                $NINJA_DIR=(gci -path (Get-CimInstance MSFT_VSInstance -Namespace root/cimv2/vs)[0].InstallLocation -r -fi ninja.exe) | split-path -parent
+                $NINJA_DIR=(gci -path (Get-CimInstance MSFT_VSInstance -Namespace root/cimv2/vs)[0].InstallLocation -r -fi ninja.exe).Directory.FullName
                 $env:PATH="$NINJA_DIR;$env:PATH"
             }
             $env:HIPCXX="${env:HIP_PATH}\bin\clang++.exe"