---
# Kubernetes namespace all chart resources are deployed into.
namespace: ollama-namespace
# Settings for the Ollama model-server deployment.
ollama:
  replicaCount: 1
  image: ollama/ollama:latest
  # Port the Ollama API listens on (Ollama's default).
  servicePort: 11434
  resources:
    requests:
      cpu: "2000m"
      memory: "2Gi"
    limits:
      cpu: "4000m"
      memory: "4Gi"
      # Quoted so the quantity stays a string; bump when gpu.enabled is true.
      nvidia.com/gpu: "0"
  # Size of the persistent volume holding pulled models.
  volumeSize: 30Gi
  nodeSelector: {}
  tolerations: []
  service:
    type: ClusterIP
  gpu:
    # Set to true to schedule onto GPU nodes and request nvidia.com/gpu.
    enabled: false
# Settings for the Ollama Web UI deployment.
webui:
  replicaCount: 1
  image: ghcr.io/ollama-webui/ollama-webui:main
  # Port the web UI container serves on.
  servicePort: 8080
  resources:
    requests:
      cpu: "500m"
      memory: "500Mi"
    limits:
      cpu: "1000m"
      memory: "1Gi"
  ingress:
    enabled: true
    # Empty mapping by default; add controller-specific annotations as needed.
    annotations: {}
      # Use appropriate annotations for your Ingress controller, e.g., for NGINX:
      # nginx.ingress.kubernetes.io/rewrite-target: /
    host: ollama.minikube.local
  # Size of the persistent volume for web UI data.
  volumeSize: 2Gi
  nodeSelector: {}
  tolerations: []
  service:
    type: NodePort