# Target namespace for all resources
namespace: ollama-namespace

# Ollama API server
ollama:
  replicaCount: 1
  image: ollama/ollama:latest
  servicePort: 11434        # default Ollama API port
  resources:
    limits:
      cpu: "2000m"
      memory: "2Gi"
      nvidia.com/gpu: "0"   # number of NVIDIA GPUs to request (0 = CPU-only)
  volumeSize: 1Gi           # size of the data volume requested for Ollama

# Ollama Web UI
webui:
  replicaCount: 1
  image: ghcr.io/ollama-webui/ollama-webui:main
  servicePort: 8080
  resources:
    limits:
      cpu: "500m"
      memory: "500Mi"
  ingress:
    host: ollama.minikube.local   # hostname used by the Ingress rule
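
# The chart templates that consume these values are not shown here. As a minimal
# sketch (assumption: this is a Helm values.yaml rendered by templates in the same
# chart), a deployment template might reference the ollama fields roughly like this:
#
#   containers:
#     - name: ollama
#       image: {{ .Values.ollama.image }}
#       ports:
#         - containerPort: {{ .Values.ollama.servicePort }}
#       resources:
#         limits:
#           {{- toYaml .Values.ollama.resources.limits | nindent 12 }}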