# Helm values for the otwld/ollama-helm chart.
# Chart docs: https://otwld.github.io/ollama-helm/
---
# Application-level settings passed to the Ollama container.
ollama:
  gpu:
    enabled: true
    # GPU vendor; "nvidia" requires the NVIDIA device plugin on the node.
    type: nvidia
  models:
    # Models downloaded at startup. Quoted: the tag contains a colon,
    # which is ambiguous as a plain YAML scalar.
    pull:
      - "gemma3:4b"
    # Models loaded into memory after pull.
    run:
      - "gemma3:4b"

# Schedule the pod only on nodes labeled gpu=nvidia.
nodeSelector:
  gpu: nvidia

# Expose Ollama outside the cluster via a LoadBalancer service.
# NOTE(review): port 3210 is non-standard — Ollama's API default is
# 11434; confirm this is the intended external port.
service:
  type: LoadBalancer
  port: 3210

# Use the NVIDIA container runtime class for GPU access.
runtimeClassName: nvidia

# Persist model data on an existing Longhorn PVC instead of a
# chart-provisioned volume.
persistentVolume:
  enabled: true
  existingClaim: longhorn-ollama