# Open WebUI deployment: single replica, pointing at the Ollama API and
# persisting its data on a PVC mounted at /app/backend/data.
apiVersion: apps/v1
kind: Deployment
metadata:
  name: open-webui
  namespace: llm
spec:
  replicas: 1
  selector:
    matchLabels:
      name: open-webui
  template:
    metadata:
      labels:
        name: open-webui
        nginx: backend
    spec:
      containers:
        - name: open-webui
          image: ghcr.io/open-webui/open-webui:main
          imagePullPolicy: "Always"
          env:
            - name: OLLAMA_BASE_URL
              value: "https://llm.dodges.it"
          volumeMounts:
            - name: data
              mountPath: /app/backend/data
          ports:
            - containerPort: 8080
              name: http
          livenessProbe:
            httpGet:
              port: 8080
              httpHeaders:
                - name: Host
                  value: "gpt.dodges.it"
      volumes:
        - name: data
          persistentVolumeClaim:
            claimName: open-webui-data
---
# ClusterIP service exposing the web UI inside the cluster.
apiVersion: v1
kind: Service
metadata:
  name: open-webui
  namespace: llm
spec:
  type: ClusterIP
  ipFamilyPolicy: PreferDualStack
  selector:
    name: open-webui
  ports:
    - protocol: TCP
      port: 8080
      targetPort: 8080
      name: http
---
# Persistent storage for Open WebUI's data directory (nfs-vrt storage class).
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: open-webui-data
  namespace: llm
spec:
  storageClassName: nfs-vrt
  accessModes:
    - ReadWriteMany
  resources:
    requests:
      storage: 200Gi
---
# TLS ingress via cert-manager (Let's Encrypt) and the NGINX ingress controller.
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  name: open-webui-ingress
  namespace: llm
  annotations:
    cert-manager.io/cluster-issuer: "letsencrypt"
    acme.cert-manager.io/http01-edit-in-place: "true"
    nginx.org/client-max-body-size: "500m"
    nginx.org/proxy-connect-timeout: "60s"
    nginx.org/proxy-read-timeout: "60s"
    nginx.org/hsts: "True"
spec:
  tls:
    - hosts:
        - gpt.dodges.it
      secretName: open-webui-le-secret
  rules:
    - host: gpt.dodges.it
      http:
        paths:
          - path: /
            pathType: Prefix
            backend:
              service:
                name: open-webui
                port:
                  number: 8080
  ingressClassName: nginx
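# To deploy, apply the manifests above (assuming they are saved as
# open-webui.yaml and the "llm" namespace already exists, since it is not
# created here):
#   kubectl apply -f open-webui.yaml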