mirror of
https://gitea.fenix-dev.com/fenix-gitea-admin/iac-ansible-private.git
synced 2026-05-14 08:35:22 +00:00
Add Lidarr, Ollama UI (Open WebUI) and Soulseek manifests
This commit is contained in:
@ -9,6 +9,93 @@ spec:
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: ai-openwebui-deployment
|
||||
namespace: fenix-ai
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: ai-openwebui
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: ai-openwebui
|
||||
spec:
|
||||
containers:
|
||||
- name: ai-openwebui
|
||||
image: ghcr.io/open-webui/open-webui:cuda
|
||||
ports:
|
||||
- containerPort: 8080
|
||||
env:
|
||||
- name: WEBUI_HOST
|
||||
value: "0.0.0.0"
|
||||
- name: OLLAMA_BASE_URL
|
||||
value: "http://ollama-api-svc.fenix-ai.svc.cluster.local:11434"
|
||||
volumeMounts:
|
||||
- name: ai-openwebui-data
|
||||
mountPath: /app/backend/data
|
||||
volumes:
|
||||
- name: ai-openwebui-data
|
||||
persistentVolumeClaim:
|
||||
claimName: ai-openwebui-data-pvc
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: PersistentVolume
|
||||
metadata:
|
||||
name: ai-openwebui-data-pv
|
||||
namespace: fenix-ai
|
||||
spec:
|
||||
capacity:
|
||||
storage: 40Gi
|
||||
storageClassName: ai-openwebui-nfs-csi
|
||||
accessModes:
|
||||
- ReadWriteOnce
|
||||
persistentVolumeReclaimPolicy: Retain
|
||||
nfs:
|
||||
server: 192.168.1.22
|
||||
path: /mnt/fenix-main-nas-pool-0/data/k8s-Volumes/k8s-cluster-iac-deployed/ai-openwebui
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: PersistentVolumeClaim
|
||||
metadata:
|
||||
name: ai-openwebui-data-pvc
|
||||
namespace: fenix-ai
|
||||
spec:
|
||||
storageClassName: ai-openwebui-nfs-csi
|
||||
accessModes:
|
||||
- ReadWriteOnce
|
||||
resources:
|
||||
requests:
|
||||
storage: 40Gi
|
||||
---
|
||||
apiVersion: storage.k8s.io/v1
|
||||
kind: StorageClass
|
||||
metadata:
|
||||
name: ai-openwebui-nfs-csi
|
||||
namespace: fenix-ai
|
||||
provisioner: nfs.csi.k8s.io
|
||||
parameters:
|
||||
server: 192.168.1.22
|
||||
share: /mnt/fenix-main-nas-pool-0/data/k8s-Volumes/k8s-cluster-iac-deployed/ai-openwebui
|
||||
allowVolumeExpansion: true
|
||||
reclaimPolicy: Retain
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: ai-openwebui-svc
|
||||
namespace: fenix-ai
|
||||
spec:
|
||||
selector:
|
||||
app: ai-openwebui
|
||||
ports:
|
||||
- port: 8080
|
||||
targetPort: 8080
|
||||
type: ClusterIP # ou LoadBalancer se tiveres suporte
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: ollama-deployment
|
||||
namespace: fenix-ai
|
||||
@ -30,9 +117,16 @@ spec:
|
||||
resources:
|
||||
limits:
|
||||
nvidia.com/gpu: 1 # garante uso da tua RTX 4060 Ti
|
||||
memory: 17Gi
|
||||
env:
|
||||
- name: OLLAMA_HOST
|
||||
value: "0.0.0.0"
|
||||
- name: NVIDIA_VISIBLE_DEVICES
|
||||
value: "all"
|
||||
- name: NVIDIA_DRIVER_CAPABILITIES
|
||||
value: "compute,utility"
|
||||
- name: CUDA_VISIBLE_DEVICES
|
||||
value: "0"
|
||||
volumeMounts:
|
||||
- name: ollama-data
|
||||
mountPath: /root/.ollama
|
||||
@ -62,7 +156,7 @@ metadata:
|
||||
namespace: fenix-ai
|
||||
spec:
|
||||
capacity:
|
||||
storage: 20Gi
|
||||
storage: 40Gi
|
||||
storageClassName: ollama-ai-nfs-csi
|
||||
accessModes:
|
||||
- ReadWriteOnce
|
||||
@ -82,7 +176,7 @@ spec:
|
||||
- ReadWriteOnce
|
||||
resources:
|
||||
requests:
|
||||
storage: 20Gi
|
||||
storage: 40Gi
|
||||
---
|
||||
apiVersion: storage.k8s.io/v1
|
||||
kind: StorageClass
|
||||
|
||||
Reference in New Issue
Block a user