Mirror of https://github.com/ollama/ollama.git (synced 2025-05-11 18:36:41 +02:00)
Introduce GPU Overhead env var (#5922)
Provide a mechanism for users to set aside an amount of VRAM on each GPU, either to make room for other applications they want to start after Ollama or to work around memory prediction bugs.
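In practice the reservation is subtracted from each GPU's reported free VRAM before the scheduler estimates how much of a model will fit. Below is a minimal sketch of that idea; the `gpuInfo` struct and the `overheadBytes`/`usableVRAM` helpers are illustrative names rather than the actual ollama API, and the environment variable is assumed to hold a byte count:

```go
package main

import (
	"fmt"
	"os"
	"strconv"
)

// gpuInfo is an illustrative stand-in for a discovered GPU.
type gpuInfo struct {
	ID         string
	FreeMemory uint64 // free bytes as reported by the driver
}

// overheadBytes reads OLLAMA_GPU_OVERHEAD, assumed here to be a byte count.
// A missing or malformed value falls back to zero (no reservation).
func overheadBytes() uint64 {
	v := os.Getenv("OLLAMA_GPU_OVERHEAD")
	if v == "" {
		return 0
	}
	n, err := strconv.ParseUint(v, 10, 64)
	if err != nil {
		return 0
	}
	return n
}

// usableVRAM is what the scheduler should plan against:
// driver-reported free memory minus the user-reserved overhead.
func usableVRAM(g gpuInfo) uint64 {
	oh := overheadBytes()
	if oh >= g.FreeMemory {
		return 0
	}
	return g.FreeMemory - oh
}

func main() {
	g := gpuInfo{ID: "GPU-0", FreeMemory: 8 << 30} // 8 GiB free
	fmt.Printf("planning with %d bytes on %s\n", usableVRAM(g), g.ID)
}
```

Under that assumption, setting something like `OLLAMA_GPU_OVERHEAD=536870912` before starting the server would keep roughly 512 MiB per GPU out of the planner's budget for other applications.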
This commit is contained in:
parent a60d9b89ce
commit b05c9e83d9
3 changed files with 28 additions and 3 deletions
```diff
@@ -1421,6 +1421,7 @@ func NewCLI() *cobra.Command {
 				envVars["OLLAMA_TMPDIR"],
 				envVars["OLLAMA_FLASH_ATTENTION"],
 				envVars["OLLAMA_LLM_LIBRARY"],
+				envVars["OLLAMA_GPU_OVERHEAD"],
 			})
 		default:
 			appendEnvDocs(cmd, envs)
```
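For context, this hunk only wires the new variable into the CLI's environment-variable help: the new entry is appended to the list handed to `appendEnvDocs`. The following is a rough sketch of that pattern with an illustrative `envVar` type and a simplified `appendEnvDocs`, not the exact ollama signatures:

```go
package main

import (
	"fmt"

	"github.com/spf13/cobra"
)

// envVar pairs an environment variable name with a short description.
type envVar struct {
	Name        string
	Description string
}

// appendEnvDocs appends an "Environment Variables:" section to the
// command's usage output, mirroring the role the function plays in the hunk.
func appendEnvDocs(cmd *cobra.Command, envs []envVar) {
	if len(envs) == 0 {
		return
	}
	section := "\nEnvironment Variables:\n"
	for _, e := range envs {
		section += fmt.Sprintf("  %-24s %s\n", e.Name, e.Description)
	}
	cmd.SetUsageTemplate(cmd.UsageTemplate() + section)
}

func main() {
	cmd := &cobra.Command{Use: "serve", Short: "Start the server"}
	appendEnvDocs(cmd, []envVar{
		{"OLLAMA_GPU_OVERHEAD", "Reserve a portion of VRAM per GPU (bytes)"},
	})
	_ = cmd.Usage()
}
```

Keeping the documented variables in a single list means a new setting such as OLLAMA_GPU_OVERHEAD shows up in the command's help output with a one-line change, which is what this hunk does.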
|
Loading…
Add table
Add a link
Reference in a new issue