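# Ollama on Kubernetes: a MetalLB IP address pool, a GPU-backed Ollama
# Deployment, a Service exposing the API, and NFS-backed storage
# (PersistentVolume, PersistentVolumeClaim, StorageClass) for model data.
# Apply the whole stack with: kubectl apply -f <this-file>.yaml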
apiVersion: metallb.io/v1beta1
kind: IPAddressPool
metadata:
  name: local-pool-2
  namespace: metallb-system
spec:
  addresses:
    - 192.168.1.100-192.168.1.200
---
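# Single-replica Ollama Deployment. The nvidia.com/gpu limit schedules the pod
# onto a GPU node, and /root/.ollama (models and config) is persisted through
# the ollama-data PVC defined below.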
apiVersion: apps/v1
kind: Deployment
metadata:
  name: ollama-deployment
  namespace: fenix-ai
spec:
  replicas: 1
  selector:
    matchLabels:
      app: ollama
  template:
    metadata:
      labels:
        app: ollama
    spec:
      containers:
        - name: ollama
          image: ollama/ollama:latest
          ports:
            - containerPort: 11434
          resources:
            limits:
              nvidia.com/gpu: 1 # ensures use of your RTX 4060 Ti
          env:
            - name: OLLAMA_HOST
              value: "0.0.0.0"
          volumeMounts:
            - name: ollama-data
              mountPath: /root/.ollama
      volumes:
        - name: ollama-data
          persistentVolumeClaim:
            claimName: ollama-data-pvc
---
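# Exposes the Ollama HTTP API (port 11434) outside the cluster.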
apiVersion: v1
kind: Service
metadata:
  name: ollama-api-svc
  namespace: fenix-ai
spec:
  selector:
    app: ollama
  ports:
    - protocol: TCP
      port: 11434
      targetPort: 11434
  type: NodePort # or LoadBalancer if your environment supports it
---
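# Statically provisioned NFS PersistentVolume backing the Ollama data directory.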
apiVersion: v1
kind: PersistentVolume
metadata:
  name: ollama-data-pv
spec:
  capacity:
    storage: 20Gi
  storageClassName: ollama-ai-nfs-csi
  accessModes:
    - ReadWriteOnce
  persistentVolumeReclaimPolicy: Retain
  nfs:
    server: 192.168.1.22
    path: /mnt/fenix-main-nas-pool-0/data/k8s-Volumes/k8s-cluster-iac-deployed/ollama
---
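# Claim referenced by the Deployment's ollama-data volume.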
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: ollama-data-pvc
  namespace: fenix-ai
spec:
  storageClassName: ollama-ai-nfs-csi
  accessModes:
    - ReadWriteOnce
  resources:
    requests:
      storage: 20Gi
---
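# NFS CSI StorageClass matching the storageClassName used by the PV and PVC above.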
apiVersion: storage.k8s.io/v1
kind: StorageClass
metadata:
  name: ollama-ai-nfs-csi
provisioner: nfs.csi.k8s.io
parameters:
  server: 192.168.1.22
  share: /mnt/fenix-main-nas-pool-0/data/k8s-Volumes/k8s-cluster-iac-deployed/ollama
allowVolumeExpansion: true
reclaimPolicy: Retain