public-k3s/cluster/helm/ollama/values.yml

# https://otwld.github.io/ollama-helm/
ollama:
  gpu:
    enabled: true
    type: nvidia
  models:
    pull:
      - gemma3:4b
    run:
      - gemma3:4b
nodeSelector:
  gpu: nvidia
service:
  type: LoadBalancer
  port: 3210
runtimeClassName: nvidia
persistentVolume:
  enabled: true
  existingClaim: ollama-pvc
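
A minimal deployment sketch for this values file, assuming the chart repository is added under the alias ollama-helm and the release and namespace are both named ollama (these names are assumptions, not taken from the file). Note that persistentVolume.existingClaim only references the PVC, so a claim named ollama-pvc must already exist in the target namespace.

# Assumed repo alias, release name, and namespace; adjust to your cluster conventions.
helm repo add ollama-helm https://otwld.github.io/ollama-helm/
helm repo update
helm upgrade --install ollama ollama-helm/ollama \
  --namespace ollama --create-namespace \
  -f public-k3s/cluster/helm/ollama/values.yml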