chore: bump kubernetes resources

This commit is contained in:
braveokafor 2024-02-17 17:20:53 +01:00
parent 0340967930
commit 31b903d831
4 changed files with 21 additions and 8 deletions

View file

@@ -5,11 +5,14 @@ ollama:
image: ollama/ollama:latest image: ollama/ollama:latest
servicePort: 11434 servicePort: 11434
resources: resources:
limits: requests:
cpu: "2000m" cpu: "2000m"
memory: "2Gi" memory: "2Gi"
limits:
cpu: "4000m"
memory: "4Gi"
nvidia.com/gpu: "0" nvidia.com/gpu: "0"
volumeSize: 1Gi volumeSize: 30Gi
nodeSelector: {} nodeSelector: {}
tolerations: [] tolerations: []
service: service:
@@ -22,16 +25,19 @@ webui:
image: ghcr.io/ollama-webui/ollama-webui:main image: ghcr.io/ollama-webui/ollama-webui:main
servicePort: 8080 servicePort: 8080
resources: resources:
limits: requests:
cpu: "500m" cpu: "500m"
memory: "500Mi" memory: "500Mi"
limits:
cpu: "1000m"
memory: "1Gi"
ingress: ingress:
enabled: true enabled: true
annotations: annotations:
# Use appropriate annotations for your Ingress controller, e.g., for NGINX: # Use appropriate annotations for your Ingress controller, e.g., for NGINX:
# nginx.ingress.kubernetes.io/rewrite-target: / # nginx.ingress.kubernetes.io/rewrite-target: /
host: ollama.minikube.local host: ollama.minikube.local
volumeSize: 1Gi volumeSize: 2Gi
nodeSelector: {} nodeSelector: {}
tolerations: [] tolerations: []
service: service:

View file

@@ -20,9 +20,13 @@ spec:
ports: ports:
- containerPort: 11434 - containerPort: 11434
resources: resources:
limits: requests:
cpu: "2000m" cpu: "2000m"
memory: "2Gi" memory: "2Gi"
limits:
cpu: "4000m"
memory: "4Gi"
nvidia.com/gpu: "0"
volumeMounts: volumeMounts:
- name: ollama-volume - name: ollama-volume
mountPath: /root/.ollama mountPath: /root/.ollama
@@ -34,4 +38,4 @@ spec:
accessModes: [ "ReadWriteOnce" ] accessModes: [ "ReadWriteOnce" ]
resources: resources:
requests: requests:
storage: 1Gi storage: 30Gi

View file

@@ -19,9 +19,12 @@ spec:
ports: ports:
- containerPort: 8080 - containerPort: 8080
resources: resources:
limits: requests:
cpu: "500m" cpu: "500m"
memory: "500Mi" memory: "500Mi"
limits:
cpu: "1000m"
memory: "1Gi"
env: env:
- name: OLLAMA_API_BASE_URL - name: OLLAMA_API_BASE_URL
value: "http://ollama-service.ollama-namespace.svc.cluster.local:11434/api" value: "http://ollama-service.ollama-namespace.svc.cluster.local:11434/api"

View file

@@ -9,4 +9,4 @@ spec:
accessModes: ["ReadWriteOnce"] accessModes: ["ReadWriteOnce"]
resources: resources:
requests: requests:
storage: 1Gi storage: 2Gi