Victor Lavaud, 6 months ago
commit cccaf27219
4 changed files with 172 additions and 0 deletions
  1. ollama/open-telegram.yaml (+32 -0)
  2. ollama/open-webui.yaml (+98 -0)
  3. ollama/resource-quota.yaml (+9 -0)
  4. ollama/values.yaml (+33 -0)

ollama/open-telegram.yaml (+32 -0)

@@ -0,0 +1,32 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: open-telegram
+  namespace: llm
+spec:
+  replicas: 1
+  selector:
+    matchLabels:
+      name: open-telegram
+  template:
+    metadata:
+      labels:
+        name: open-telegram
+    spec:
+      containers:
+        - name: open-telegram
+          image: ruecat/ollama-telegram
+          imagePullPolicy: "Always"
+          env:
+          - name: TOKEN
+            valueFrom:
+              secretKeyRef:
+                name: telegram
+                key: apitoken
+          - name: ADMIN_IDS
+            value: "106663712"
+          - name: USER_IDS
+            value: "106663712"
+          ports:
+            - containerPort: 11434
+              name: http
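
The TOKEN variable is pulled from a Secret named telegram (key apitoken) that is not part of this commit. A minimal sketch of what that Secret could look like, with a placeholder token value:

# Hypothetical sketch of the Secret referenced by secretKeyRef above;
# the real bot token is created out of band rather than committed.
apiVersion: v1
kind: Secret
metadata:
  name: telegram
  namespace: llm
type: Opaque
stringData:
  apitoken: "<telegram-bot-api-token>"   # placeholder value

Using stringData lets the token be written in plain text; the API server base64-encodes it into data on admission.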

ollama/open-webui.yaml (+98 -0)

@@ -0,0 +1,98 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: open-webui
+  namespace: llm
+spec:
+  replicas: 1
+  selector:
+    matchLabels:
+      name: open-webui
+  template:
+    metadata:
+      labels:
+        name: open-webui
+        nginx: backend
+    spec:
+      containers:
+        - name: open-webui
+          image: ghcr.io/open-webui/open-webui:main
+          imagePullPolicy: "Always"
+          env:
+          - name: OLLAMA_BASE_URL
+            value: "https://llm.dodges.it"
+          volumeMounts:
+          - name: data
+            mountPath: /app/backend/data
+          ports:
+            - containerPort: 8080
+              name: http
+          livenessProbe:
+            httpGet:
+              port: 8080
+              httpHeaders:
+                - name: Host
+                  value: "gpt.dodges.it"
+      volumes:
+        - name: data
+          persistentVolumeClaim:
+            claimName: open-webui-data
+---
+apiVersion: v1
+kind: Service
+metadata:
+  name: open-webui
+  namespace: llm
+spec:
+  type: ClusterIP
+  ipFamilyPolicy: PreferDualStack
+  selector:
+    name: open-webui
+  ports:
+    - protocol: TCP
+      port: 8080
+      targetPort: 8080
+      name: http
+---
+apiVersion: v1
+kind: PersistentVolumeClaim
+metadata:
+  name: open-webui-data
+  namespace: llm
+spec:
+  storageClassName: nfs-client
+  accessModes:
+    - ReadWriteMany
+  resources:
+    requests:
+      storage: 10Gi
+---
+apiVersion: networking.k8s.io/v1
+kind: Ingress
+metadata:
+  name: open-webui-ingress
+  namespace: llm
+  annotations:
+    cert-manager.io/cluster-issuer: "letsencrypt"
+    acme.cert-manager.io/http01-edit-in-place: "true"
+    nginx.org/client-max-body-size: "500m"
+    nginx.org/proxy-connect-timeout: "60s"
+    nginx.org/proxy-read-timeout: "60s"
+    nginx.org/hsts: "True"
+spec:
+  tls:
+  - hosts:
+    - gpt.dodges.it
+    secretName: open-webui-le-secret
+  rules:
+  - host: gpt.dodges.it
+    http:
+      paths:
+      - path: /
+        pathType: Prefix
+        backend:
+          service:
+            name: open-webui
+            port:
+              number: 8080
+  ingressClassName: nginx
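
The cert-manager.io/cluster-issuer annotation points at a ClusterIssuer named letsencrypt that is not defined in this commit. A hedged sketch of such an issuer, assuming an ACME HTTP-01 setup (the contact email and account-key Secret name are placeholders):

# Hypothetical sketch of the "letsencrypt" ClusterIssuer assumed by the
# Ingress annotations above and in values.yaml below.
apiVersion: cert-manager.io/v1
kind: ClusterIssuer
metadata:
  name: letsencrypt
spec:
  acme:
    server: https://acme-v02.api.letsencrypt.org/directory
    email: admin@example.org            # placeholder contact address
    privateKeySecretRef:
      name: letsencrypt-account-key     # assumed ACME account key Secret
    solvers:
      - http01:
          ingress:
            ingressClassName: nginx     # `class: nginx` on older cert-manager

The acme.cert-manager.io/http01-edit-in-place annotation makes the HTTP-01 solver modify this Ingress in place instead of creating a temporary one, so the challenge route stays on the same nginx configuration.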

ollama/resource-quota.yaml (+9 -0)

@@ -0,0 +1,9 @@
+apiVersion: v1
+kind: ResourceQuota
+metadata:
+  name: llm-resource-quotas
+  namespace: llm
+spec:
+  hard:
+    limits.ephemeral-storage: 2Gi
+    requests.ephemeral-storage: 1Gi
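
Because this quota tracks requests.ephemeral-storage and limits.ephemeral-storage, every new container in the llm namespace has to declare both values (unless a LimitRange supplies defaults); otherwise the API server rejects the pod. Neither Deployment above sets them, so a resources block along these lines would likely be needed per container (the sizes are placeholders chosen to fit inside the 1Gi/2Gi quota):

          # Hypothetical resources block to add under each container spec
          # in the Deployments above; sizes are illustrative only.
          resources:
            requests:
              ephemeral-storage: 100Mi
            limits:
              ephemeral-storage: 500Mi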

ollama/values.yaml (+33 -0)

@@ -0,0 +1,33 @@
+ollama:
+  gpu:
+    enabled: true
+    number: 1 
+  models: 
+    - mistral
+    - mixtral
+    - gemma
+ingress:
+  enabled: true
+  className: "nginx"
+  annotations:
+    acme.cert-manager.io/http01-edit-in-place: "true"
+    cert-manager.io/cluster-issuer: "letsencrypt"
+    nginx.org/client-max-body-size: "500m"
+    nginx.org/hsts: "True"
+    nginx.org/hsts-max-age: "15552000"
+    nginx.org/proxy-connect-timeout: "60s"
+    nginx.org/proxy-read-timeout: "60s"
+  hosts:
+    - host: "llm.dodges.it"
+      paths:
+        - path: "/"
+          pathType: "Prefix"
+  tls:
+    - hosts:
+      - llm.dodges.it
+      secretName: llm-le-secret
+persistentVolume:
+  enabled: "true"
+  storageClass: "nfs-client"
+nodeSelector:
+  nvidia.com/gpu.present: "true"
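
The layout of this values file (ollama.gpu, ollama.models, ingress, persistentVolume, nodeSelector) matches the otwld ollama-helm chart, so the release is presumably installed with something like helm install ollama ollama-helm/ollama -n llm -f ollama/values.yaml (repo alias, chart and release names assumed here). All four files also take the llm namespace as a given; a minimal sketch in case it is not created elsewhere in the repository:

# Hypothetical: the llm namespace assumed by every manifest in this commit;
# it may already be defined elsewhere or created by hand.
apiVersion: v1
kind: Namespace
metadata:
  name: llm

The nvidia.com/gpu.present label used in the nodeSelector is typically applied by NVIDIA's gpu-feature-discovery, so GPU nodes need that tooling running for the Ollama pods to schedule.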