Compare commits
196 Commits
auto-updat
...
auto-updat
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
dd1017789f | ||
| 07620e8651 | |||
| 2485e1e951 | |||
| 3a88233167 | |||
| 88095047df | |||
| 07829e8294 | |||
| 774f9a43a7 | |||
| 3132e71569 | |||
| 69a74afce5 | |||
| 095c098a49 | |||
| c19a086b38 | |||
| bf33765302 | |||
|
|
c28566ce21 | ||
|
|
1b2b4da98d | ||
|
|
014284c11d | ||
|
|
fc00513db3 | ||
|
|
1451a5fb37 | ||
|
|
d19ae33cd1 | ||
|
|
8a8cab019f | ||
|
|
137384ce55 | ||
|
|
1aee4d5cd7 | ||
|
|
9d6fa51fc7 | ||
| fc689d5e22 | |||
| a2f4f989e7 | |||
| cacc5ef02b | |||
| f05a1515e6 | |||
| dbb9722840 | |||
| e7e066587f | |||
| cb83a3fa38 | |||
|
|
4b3e1a10d4 | ||
|
|
caf024aaa2 | ||
|
|
f4c1a4b310 | ||
|
|
f6623efab1 | ||
|
|
52cea30ac3 | ||
|
|
67bcf5247e | ||
|
|
e38f18d9a8 | ||
|
|
67bdb8ea29 | ||
|
|
1e40073cb7 | ||
|
|
82e9b336dc | ||
|
|
afbf68c6fa | ||
|
|
f6be70e1ca | ||
|
|
02dff40276 | ||
|
|
e5d9a78699 | ||
|
|
1221dbf7b5 | ||
|
|
42ebe4cbda | ||
|
|
4059bc1a70 | ||
|
|
65f8056ef7 | ||
|
|
8fca12c674 | ||
|
|
51cc40377c | ||
|
|
ff58069789 | ||
|
|
6b5a120fc4 | ||
|
|
499da735f7 | ||
|
|
3054a9242b | ||
|
|
4d095e2773 | ||
|
|
09562a6cb9 | ||
|
|
b81087515d | ||
|
|
39232d422d | ||
|
|
40b565b5c8 | ||
|
|
a7aaa3e4a5 | ||
|
|
5f882c7beb | ||
|
|
72cf9902d4 | ||
|
|
a4b2eb8ab9 | ||
|
|
80b7b0a7f7 | ||
|
|
ecd475e83d | ||
| 69aed3fe23 | |||
|
|
d74479c935 | ||
|
|
e5ad65b63b | ||
|
|
0b143959b9 | ||
|
|
d35da03726 | ||
|
|
d5a666ac62 | ||
| cb2a4384f3 | |||
| 9ec3e8ef56 | |||
|
|
02986ae5b6 | ||
|
|
be766e24c8 | ||
|
|
4c1f959d62 | ||
|
|
cb9d027757 | ||
|
|
4981fef85d | ||
|
|
49515d6081 | ||
|
|
d0895497fb | ||
|
|
291fafad58 | ||
|
|
48ee1fcd10 | ||
| 2bcc0f9414 | |||
| dd191c1c6e | |||
| 0a101d7b98 | |||
| 98d6d53b09 | |||
| 0f28d93647 | |||
| 16b3a7fdcb | |||
|
|
8952396c9b | ||
| 88e5b5f1b7 | |||
|
|
6f8ca40108 | ||
|
|
da37ae71be | ||
|
|
a4a1fecbd1 | ||
|
|
3564f5d9c3 | ||
|
|
9df0a3c8b7 | ||
| 3157b0c325 | |||
|
|
cbe1c23709 | ||
|
|
70198ca1c2 | ||
|
|
39207fcb39 | ||
|
|
bae89f8738 | ||
|
|
72950dae83 | ||
| 9096b4bcf7 | |||
|
|
8b6b0a0cd6 | ||
| 9024a41a4c | |||
| 2b5e76e24d | |||
|
|
651acf665e | ||
| 7bd482a000 | |||
| 6a0be650ea | |||
| b78efdb81a | |||
| 7bf27506b0 | |||
| 5e3be29b7a | |||
| 44ce19b815 | |||
| 434b947896 | |||
| b13107882c | |||
| f76d44ce98 | |||
| 43c0fdf2f2 | |||
| 512eaf842e | |||
| 91cd967989 | |||
| d092401cd6 | |||
| 5b2768ad4f | |||
| a7d71e40a3 | |||
| 9cbaa23aed | |||
|
|
0b3fddbd40 | ||
|
|
ee0c55dcea | ||
|
|
f545a47162 | ||
|
|
fdbeb1a9a7 | ||
|
|
0fbeb96a6a | ||
|
|
0eba6143f4 | ||
|
|
b71f54f714 | ||
|
|
9a3bdfaf9c | ||
|
|
cfa275f4fc | ||
|
|
4a887b6775 | ||
| 981aa2ba15 | |||
| 872c0f4adf | |||
| 5b1ff26975 | |||
| 93bf782ece | |||
| f153bfc0b4 | |||
| 6b60fca39c | |||
| abb47a6db0 | |||
| e008ac3f59 | |||
| c945575ea1 | |||
| 01348dd99e | |||
| 0b4507a72d | |||
| 5cca64813a | |||
| 837094944e | |||
| 7da2fab580 | |||
| 007df29133 | |||
| b25a82ba1e | |||
| 5e7e9031a3 | |||
| 70ae7c9a50 | |||
| d95faaf2c1 | |||
| af7e29c063 | |||
| 4ea48f0f94 | |||
| 4bfc35d8e2 | |||
| 46c0fab78a | |||
| 6dc43149f4 | |||
| ca1efe6230 | |||
| e90d2c9dc5 | |||
| a884c2b969 | |||
| db92976872 | |||
|
|
d924ebd3ee | ||
|
|
4b30185655 | ||
|
|
a65b37f000 | ||
|
|
f394b4f9da | ||
| 5d12fc854a | |||
|
|
f415e0711e | ||
| 14dc69904c | |||
| f6dc7aa6e3 | |||
| badd82f9af | |||
| a5cb49471a | |||
| 79c23e14b0 | |||
| 5bc44e45b0 | |||
|
|
4a80f2f596 | ||
| b58461232c | |||
| be6e601275 | |||
| 063a4a502b | |||
|
|
22382b63a1 | ||
|
|
718709115f | ||
|
|
df78728137 | ||
|
|
9671dc3aa8 | ||
|
|
609fd9d522 | ||
|
|
0e66f4bddd | ||
| 9f35d520b6 | |||
| 512e993f7b | |||
| b6b6e23026 | |||
| bd997082e9 | |||
| 0689b6abc8 | |||
| bbeac646e9 | |||
| 5d4d620681 | |||
| 5cf2a32ca0 | |||
| 35c072471a | |||
| 9c0458c020 | |||
| 28d5bcfabd | |||
| 65b2aaa91d | |||
| 50e54e5692 | |||
| b79aa17856 | |||
| d91b0928b7 |
@@ -22,12 +22,13 @@ jobs:
|
||||
|
||||
- name: Install Python dependencies
|
||||
run: |
|
||||
pip install pyyaml
|
||||
python3 -m venv .venv
|
||||
.venv/bin/pip install pyyaml
|
||||
|
||||
- name: Generate K8s Services Wiki
|
||||
run: |
|
||||
echo "📋 Starting K8s wiki generation..."
|
||||
python3 .gitea/scripts/generate-k8s-wiki.py k8s/ Kubernetes-Services.md
|
||||
.venv/bin/python .gitea/scripts/generate-k8s-wiki.py k8s/ Kubernetes-Services.md
|
||||
|
||||
if [ -f "Kubernetes-Services.md" ]; then
|
||||
echo "✅ Wiki content generated successfully"
|
||||
|
||||
@@ -18,6 +18,7 @@ ArgoCD homelab project
|
||||
| **external-secrets** | [](https://ag.hexor.cy/applications/argocd/external-secrets) |
|
||||
| **kube-system-custom** | [](https://ag.hexor.cy/applications/argocd/kube-system-custom) |
|
||||
| **kubernetes-dashboard** | [](https://ag.hexor.cy/applications/argocd/kubernetes-dashboard) |
|
||||
| **longhorn** | [](https://ag.hexor.cy/applications/argocd/longhorn) |
|
||||
| **postgresql** | [](https://ag.hexor.cy/applications/argocd/postgresql) |
|
||||
| **prom-stack** | [](https://ag.hexor.cy/applications/argocd/prom-stack) |
|
||||
| **system-upgrade** | [](https://ag.hexor.cy/applications/argocd/system-upgrade) |
|
||||
@@ -44,6 +45,8 @@ ArgoCD homelab project
|
||||
| **jellyfin** | [](https://ag.hexor.cy/applications/argocd/jellyfin) |
|
||||
| **k8s-secrets** | [](https://ag.hexor.cy/applications/argocd/k8s-secrets) |
|
||||
| **khm** | [](https://ag.hexor.cy/applications/argocd/khm) |
|
||||
| **mtproxy** | [](https://ag.hexor.cy/applications/argocd/mtproxy) |
|
||||
| **n8n** | [](https://ag.hexor.cy/applications/argocd/n8n) |
|
||||
| **ollama** | [](https://ag.hexor.cy/applications/argocd/ollama) |
|
||||
| **paperless** | [](https://ag.hexor.cy/applications/argocd/paperless) |
|
||||
| **pasarguard** | [](https://ag.hexor.cy/applications/argocd/pasarguard) |
|
||||
|
||||
@@ -77,8 +77,8 @@ spec:
|
||||
labels:
|
||||
app: gitea-runner
|
||||
spec:
|
||||
nodeSelector:
|
||||
kubernetes.io/hostname: home.homenet
|
||||
#nodeSelector:
|
||||
# kubernetes.io/hostname: home.homenet
|
||||
volumes:
|
||||
- name: docker-sock
|
||||
hostPath:
|
||||
@@ -90,27 +90,30 @@ spec:
|
||||
affinity:
|
||||
nodeAffinity:
|
||||
preferredDuringSchedulingIgnoredDuringExecution:
|
||||
- weight: 3
|
||||
preference:
|
||||
matchExpressions:
|
||||
- key: kubernetes.io/hostname
|
||||
operator: In
|
||||
values:
|
||||
- home.homenet
|
||||
- weight: 1
|
||||
preference:
|
||||
matchExpressions:
|
||||
- key: kubernetes.io/hostname
|
||||
operator: In
|
||||
values:
|
||||
- master.tail2fe2d.ts.net
|
||||
- home.homenet
|
||||
- weight: 2
|
||||
preference:
|
||||
matchExpressions:
|
||||
- key: kubernetes.io/hostname
|
||||
operator: In
|
||||
values:
|
||||
- nas.homenet
|
||||
- master.tail2fe2d.ts.net
|
||||
- weight: 3
|
||||
preference:
|
||||
matchExpressions:
|
||||
- key: kubernetes.io/hostname
|
||||
operator: In
|
||||
values:
|
||||
- it.tail2fe2d.ts.net
|
||||
- ch.tail2fe2d.ts.net
|
||||
- us.tail2fe2d.ts.net
|
||||
|
||||
requiredDuringSchedulingIgnoredDuringExecution:
|
||||
nodeSelectorTerms:
|
||||
- matchExpressions:
|
||||
@@ -118,7 +121,9 @@ spec:
|
||||
operator: In
|
||||
values:
|
||||
- home.homenet
|
||||
- nas.homenet
|
||||
- it.tail2fe2d.ts.net
|
||||
- ch.tail2fe2d.ts.net
|
||||
- us.tail2fe2d.ts.net
|
||||
- master.tail2fe2d.ts.net
|
||||
containers:
|
||||
- name: gitea-runner
|
||||
|
||||
@@ -74,19 +74,14 @@ spec:
|
||||
- nas.homenet
|
||||
volumes:
|
||||
- name: upload-storage
|
||||
nfs:
|
||||
server: nas.homenet
|
||||
path: /mnt/storage/Storage/k8s/immich/library/
|
||||
readOnly: false
|
||||
persistentVolumeClaim:
|
||||
claimName: immich-upload-pvc
|
||||
- name: gphoto-storage
|
||||
nfs:
|
||||
server: nas.homenet
|
||||
path: /mnt/storage/Storage/k8s/immich/GPHOTO/
|
||||
readOnly: false
|
||||
persistentVolumeClaim:
|
||||
claimName: immich-gphoto-pvc
|
||||
- name: camera
|
||||
nfs:
|
||||
server: nas.homenet
|
||||
path: /mnt/storage/Storage/Syncthing-repos/PhoneCamera/
|
||||
persistentVolumeClaim:
|
||||
claimName: immich-camera-pvc
|
||||
readOnly: true
|
||||
- name: localtime
|
||||
hostPath:
|
||||
|
||||
@@ -1,79 +1,52 @@
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: PersistentVolume
|
||||
metadata:
|
||||
name: immich-upload-pv
|
||||
spec:
|
||||
capacity:
|
||||
storage: 500Gi
|
||||
accessModes:
|
||||
- ReadWriteOnce
|
||||
hostPath:
|
||||
path: /mnt/storage/Storage/k8s/immich/library
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: PersistentVolumeClaim
|
||||
metadata:
|
||||
name: immich-upload-pvc
|
||||
namespace: immich
|
||||
spec:
|
||||
storageClassName: ""
|
||||
accessModes:
|
||||
- ReadWriteOnce
|
||||
volumeName: immich-upload-pv
|
||||
- ReadWriteMany
|
||||
storageClassName: nfs-csi
|
||||
resources:
|
||||
requests:
|
||||
storage: 500Gi
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: PersistentVolume
|
||||
metadata:
|
||||
name: immich-gphoto-pv
|
||||
spec:
|
||||
capacity:
|
||||
storage: 500Gi
|
||||
accessModes:
|
||||
- ReadWriteOnce
|
||||
hostPath:
|
||||
path: /mnt/storage/Storage/k8s/immich/GPHOTO
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: PersistentVolumeClaim
|
||||
metadata:
|
||||
name: immich-gphoto-pvc
|
||||
namespace: immich
|
||||
spec:
|
||||
storageClassName: ""
|
||||
accessModes:
|
||||
- ReadWriteOnce
|
||||
volumeName: immich-gphoto-pv
|
||||
- ReadWriteMany
|
||||
storageClassName: nfs-csi
|
||||
resources:
|
||||
requests:
|
||||
storage: 500Gi
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: PersistentVolume
|
||||
metadata:
|
||||
name: immich-db-pv
|
||||
spec:
|
||||
capacity:
|
||||
storage: 10Gi
|
||||
accessModes:
|
||||
- ReadWriteOnce
|
||||
hostPath:
|
||||
path: /mnt/storage/Storage/k8s/immich/db
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: PersistentVolumeClaim
|
||||
metadata:
|
||||
name: immich-db-pvc
|
||||
namespace: immich
|
||||
spec:
|
||||
storageClassName: ""
|
||||
accessModes:
|
||||
- ReadWriteOnce
|
||||
volumeName: immich-db-pv
|
||||
- ReadWriteMany
|
||||
storageClassName: nfs-csi
|
||||
resources:
|
||||
requests:
|
||||
storage: 10Gi
|
||||
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: PersistentVolumeClaim
|
||||
metadata:
|
||||
name: immich-camera-pvc
|
||||
namespace: immich
|
||||
spec:
|
||||
accessModes:
|
||||
- ReadOnlyMany
|
||||
storageClassName: nfs-csi
|
||||
resources:
|
||||
requests:
|
||||
storage: 100Gi
|
||||
|
||||
@@ -19,7 +19,7 @@ spec:
|
||||
kubernetes.io/os: linux
|
||||
containers:
|
||||
- name: secret-reader
|
||||
image: ultradesu/k8s-secrets:0.1.1
|
||||
image: ultradesu/k8s-secrets:0.2.1
|
||||
imagePullPolicy: Always
|
||||
args:
|
||||
- "--secrets"
|
||||
@@ -28,6 +28,7 @@ spec:
|
||||
- "k8s-secret"
|
||||
- "--port"
|
||||
- "3000"
|
||||
- "--webhook"
|
||||
ports:
|
||||
- containerPort: 3000
|
||||
name: http
|
||||
|
||||
20
k8s/apps/mtproxy/app.yaml
Normal file
20
k8s/apps/mtproxy/app.yaml
Normal file
@@ -0,0 +1,20 @@
|
||||
apiVersion: argoproj.io/v1alpha1
|
||||
kind: Application
|
||||
metadata:
|
||||
name: mtproxy
|
||||
namespace: argocd
|
||||
spec:
|
||||
project: apps
|
||||
destination:
|
||||
namespace: mtproxy
|
||||
server: https://kubernetes.default.svc
|
||||
source:
|
||||
repoURL: ssh://git@gt.hexor.cy:30022/ab/homelab.git
|
||||
targetRevision: HEAD
|
||||
path: k8s/apps/mtproxy
|
||||
syncPolicy:
|
||||
automated:
|
||||
selfHeal: true
|
||||
prune: true
|
||||
syncOptions:
|
||||
- CreateNamespace=true
|
||||
49
k8s/apps/mtproxy/deployment.yaml
Normal file
49
k8s/apps/mtproxy/deployment.yaml
Normal file
@@ -0,0 +1,49 @@
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: mtproxy
|
||||
labels:
|
||||
app: mtproxy
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: mtproxy
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: mtproxy
|
||||
spec:
|
||||
affinity:
|
||||
nodeAffinity:
|
||||
requiredDuringSchedulingIgnoredDuringExecution:
|
||||
nodeSelectorTerms:
|
||||
- matchExpressions:
|
||||
- key: xray-node-address
|
||||
operator: Exists
|
||||
containers:
|
||||
- name: mtproxy
|
||||
image: telegrammessenger/proxy:latest
|
||||
imagePullPolicy: Always
|
||||
ports:
|
||||
- name: proxy
|
||||
containerPort: 443
|
||||
protocol: TCP
|
||||
env:
|
||||
- name: SECRET
|
||||
value: "00baadf00d15abad1deaa51abaadcafe"
|
||||
volumeMounts:
|
||||
- name: data
|
||||
mountPath: /data
|
||||
resources:
|
||||
requests:
|
||||
memory: "128Mi"
|
||||
cpu: "100m"
|
||||
limits:
|
||||
memory: "256Mi"
|
||||
cpu: "500m"
|
||||
volumes:
|
||||
- name: data
|
||||
persistentVolumeClaim:
|
||||
claimName: mtproxy-data
|
||||
8
k8s/apps/mtproxy/kustomization.yaml
Normal file
8
k8s/apps/mtproxy/kustomization.yaml
Normal file
@@ -0,0 +1,8 @@
|
||||
apiVersion: kustomize.config.k8s.io/v1beta1
|
||||
kind: Kustomization
|
||||
|
||||
resources:
|
||||
- ./app.yaml
|
||||
- ./deployment.yaml
|
||||
- ./service.yaml
|
||||
- ./storage.yaml
|
||||
14
k8s/apps/mtproxy/service.yaml
Normal file
14
k8s/apps/mtproxy/service.yaml
Normal file
@@ -0,0 +1,14 @@
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: mtproxy
|
||||
spec:
|
||||
type: LoadBalancer
|
||||
selector:
|
||||
app: mtproxy
|
||||
ports:
|
||||
- name: proxy
|
||||
port: 30443
|
||||
targetPort: 443
|
||||
protocol: TCP
|
||||
12
k8s/apps/mtproxy/storage.yaml
Normal file
12
k8s/apps/mtproxy/storage.yaml
Normal file
@@ -0,0 +1,12 @@
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: PersistentVolumeClaim
|
||||
metadata:
|
||||
name: mtproxy-data
|
||||
spec:
|
||||
accessModes:
|
||||
- ReadWriteOnce
|
||||
storageClassName: longhorn
|
||||
resources:
|
||||
requests:
|
||||
storage: 1Gi
|
||||
21
k8s/apps/n8n/app.yaml
Normal file
21
k8s/apps/n8n/app.yaml
Normal file
@@ -0,0 +1,21 @@
|
||||
apiVersion: argoproj.io/v1alpha1
|
||||
kind: Application
|
||||
metadata:
|
||||
name: n8n
|
||||
namespace: argocd
|
||||
spec:
|
||||
project: apps
|
||||
destination:
|
||||
namespace: n8n
|
||||
server: https://kubernetes.default.svc
|
||||
source:
|
||||
repoURL: ssh://git@gt.hexor.cy:30022/ab/homelab.git
|
||||
targetRevision: HEAD
|
||||
path: k8s/apps/n8n
|
||||
syncPolicy:
|
||||
automated:
|
||||
selfHeal: true
|
||||
prune: true
|
||||
syncOptions:
|
||||
- CreateNamespace=true
|
||||
|
||||
153
k8s/apps/n8n/deployment-main.yaml
Normal file
153
k8s/apps/n8n/deployment-main.yaml
Normal file
@@ -0,0 +1,153 @@
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: n8n-main
|
||||
labels:
|
||||
app: n8n
|
||||
component: main
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: n8n
|
||||
component: main
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: n8n
|
||||
component: main
|
||||
spec:
|
||||
serviceAccountName: n8n
|
||||
initContainers:
|
||||
- name: install-tools
|
||||
image: alpine:3.22
|
||||
command:
|
||||
- /bin/sh
|
||||
- -c
|
||||
- |
|
||||
set -e
|
||||
if [ -x /tools/kubectl ]; then
|
||||
echo "kubectl already exists, skipping download"
|
||||
/tools/kubectl version --client
|
||||
exit 0
|
||||
fi
|
||||
echo "Downloading kubectl..."
|
||||
ARCH=$(uname -m)
|
||||
case $ARCH in
|
||||
x86_64) ARCH="amd64" ;;
|
||||
aarch64) ARCH="arm64" ;;
|
||||
esac
|
||||
wget -O /tools/kubectl "https://dl.k8s.io/release/$(wget -qO- https://dl.k8s.io/release/stable.txt)/bin/linux/${ARCH}/kubectl"
|
||||
chmod +x /tools/kubectl
|
||||
/tools/kubectl version --client
|
||||
volumeMounts:
|
||||
- name: tools
|
||||
mountPath: /tools
|
||||
securityContext:
|
||||
runAsUser: 1000
|
||||
runAsGroup: 1000
|
||||
runAsNonRoot: true
|
||||
containers:
|
||||
- name: n8n
|
||||
image: docker.n8n.io/n8nio/n8n:latest
|
||||
ports:
|
||||
- containerPort: 5678
|
||||
name: http
|
||||
env:
|
||||
- name: PATH
|
||||
value: "/opt/tools:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
|
||||
- name: HOME
|
||||
value: "/home/node"
|
||||
- name: N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS
|
||||
value: "true"
|
||||
- name: NODES_EXCLUDE
|
||||
value: "[]"
|
||||
- name: N8N_HOST
|
||||
value: "n8n.hexor.cy"
|
||||
- name: N8N_PORT
|
||||
value: "5678"
|
||||
- name: N8N_PROTOCOL
|
||||
value: "https"
|
||||
- name: N8N_RUNNERS_ENABLED
|
||||
value: "true"
|
||||
- name: N8N_RUNNERS_MODE
|
||||
value: "external"
|
||||
- name: EXECUTIONS_MODE
|
||||
value: "queue"
|
||||
- name: QUEUE_BULL_REDIS_HOST
|
||||
value: "n8n-redis"
|
||||
- name: NODE_ENV
|
||||
value: "production"
|
||||
- name: WEBHOOK_URL
|
||||
value: "https://n8n.hexor.cy/"
|
||||
- name: GENERIC_TIMEZONE
|
||||
value: "Europe/Moscow"
|
||||
- name: TZ
|
||||
value: "Europe/Moscow"
|
||||
- name: DB_TYPE
|
||||
value: "postgresdb"
|
||||
- name: DB_POSTGRESDB_HOST
|
||||
value: "psql.psql.svc"
|
||||
- name: DB_POSTGRESDB_DATABASE
|
||||
value: "n8n"
|
||||
- name: DB_POSTGRESDB_USER
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: credentials
|
||||
key: username
|
||||
- name: DB_POSTGRESDB_PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: credentials
|
||||
key: password
|
||||
- name: N8N_ENCRYPTION_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: credentials
|
||||
key: encryptionkey
|
||||
- name: N8N_RUNNERS_AUTH_TOKEN
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: credentials
|
||||
key: runnertoken
|
||||
volumeMounts:
|
||||
- name: n8n-data
|
||||
mountPath: /home/node/.n8n
|
||||
- name: tools
|
||||
mountPath: /opt/tools
|
||||
resources:
|
||||
requests:
|
||||
cpu: 2000m
|
||||
memory: 512Mi
|
||||
limits:
|
||||
cpu: 4000m
|
||||
memory: 2048Gi
|
||||
livenessProbe:
|
||||
httpGet:
|
||||
path: /healthz
|
||||
port: http
|
||||
initialDelaySeconds: 120
|
||||
periodSeconds: 30
|
||||
timeoutSeconds: 10
|
||||
failureThreshold: 6
|
||||
readinessProbe:
|
||||
httpGet:
|
||||
path: /healthz/readiness
|
||||
port: http
|
||||
initialDelaySeconds: 60
|
||||
periodSeconds: 10
|
||||
timeoutSeconds: 5
|
||||
failureThreshold: 10
|
||||
volumes:
|
||||
- name: n8n-data
|
||||
persistentVolumeClaim:
|
||||
claimName: n8n-data
|
||||
- name: tools
|
||||
persistentVolumeClaim:
|
||||
claimName: n8n-tools
|
||||
securityContext:
|
||||
runAsUser: 1000
|
||||
runAsGroup: 1000
|
||||
runAsNonRoot: true
|
||||
fsGroup: 1000
|
||||
112
k8s/apps/n8n/deployment-worker.yaml
Normal file
112
k8s/apps/n8n/deployment-worker.yaml
Normal file
@@ -0,0 +1,112 @@
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: n8n-worker
|
||||
labels:
|
||||
app: n8n
|
||||
component: worker
|
||||
spec:
|
||||
replicas: 2
|
||||
selector:
|
||||
matchLabels:
|
||||
app: n8n
|
||||
component: worker
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: n8n
|
||||
component: worker
|
||||
spec:
|
||||
serviceAccountName: n8n
|
||||
containers:
|
||||
- name: n8n-worker
|
||||
image: docker.n8n.io/n8nio/n8n:latest
|
||||
command: ["n8n", "worker"]
|
||||
env:
|
||||
- name: PATH
|
||||
value: "/opt/tools:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
|
||||
- name: HOME
|
||||
value: "/home/node"
|
||||
- name: NODES_EXCLUDE
|
||||
value: "[]"
|
||||
- name: N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS
|
||||
value: "true"
|
||||
- name: N8N_RUNNERS_ENABLED
|
||||
value: "true"
|
||||
- name: N8N_RUNNERS_MODE
|
||||
value: "external"
|
||||
- name: N8N_PORT
|
||||
value: "80"
|
||||
- name: EXECUTIONS_MODE
|
||||
value: "queue"
|
||||
- name: QUEUE_BULL_REDIS_HOST
|
||||
value: "n8n-redis"
|
||||
- name: N8N_RUNNERS_TASK_BROKER_URI
|
||||
value: "http://n8n:80"
|
||||
- name: NODE_ENV
|
||||
value: "production"
|
||||
- name: GENERIC_TIMEZONE
|
||||
value: "Europe/Moscow"
|
||||
- name: TZ
|
||||
value: "Europe/Moscow"
|
||||
- name: DB_TYPE
|
||||
value: "postgresdb"
|
||||
- name: DB_POSTGRESDB_HOST
|
||||
value: "psql.psql.svc"
|
||||
- name: DB_POSTGRESDB_DATABASE
|
||||
value: "n8n"
|
||||
- name: DB_POSTGRESDB_USER
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: credentials
|
||||
key: username
|
||||
- name: DB_POSTGRESDB_PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: credentials
|
||||
key: password
|
||||
- name: N8N_ENCRYPTION_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: credentials
|
||||
key: encryptionkey
|
||||
- name: N8N_RUNNERS_AUTH_TOKEN
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: credentials
|
||||
key: runnertoken
|
||||
volumeMounts:
|
||||
- name: n8n-data
|
||||
mountPath: /home/node/.n8n
|
||||
- name: tools
|
||||
mountPath: /opt/tools
|
||||
resources:
|
||||
requests:
|
||||
cpu: 2000m
|
||||
memory: 512Mi
|
||||
limits:
|
||||
cpu: 4000m
|
||||
memory: 2048Gi
|
||||
livenessProbe:
|
||||
exec:
|
||||
command:
|
||||
- /bin/sh
|
||||
- -c
|
||||
- "ps aux | grep '[n]8n worker' || exit 1"
|
||||
initialDelaySeconds: 30
|
||||
periodSeconds: 30
|
||||
timeoutSeconds: 5
|
||||
failureThreshold: 3
|
||||
volumes:
|
||||
- name: n8n-data
|
||||
persistentVolumeClaim:
|
||||
claimName: n8n-data
|
||||
- name: tools
|
||||
persistentVolumeClaim:
|
||||
claimName: n8n-tools
|
||||
securityContext:
|
||||
runAsUser: 1000
|
||||
runAsGroup: 1000
|
||||
runAsNonRoot: true
|
||||
fsGroup: 1000
|
||||
50
k8s/apps/n8n/external-secrets.yaml
Normal file
50
k8s/apps/n8n/external-secrets.yaml
Normal file
@@ -0,0 +1,50 @@
|
||||
---
|
||||
apiVersion: external-secrets.io/v1
|
||||
kind: ExternalSecret
|
||||
metadata:
|
||||
name: credentials
|
||||
spec:
|
||||
target:
|
||||
name: credentials
|
||||
deletionPolicy: Delete
|
||||
template:
|
||||
type: Opaque
|
||||
data:
|
||||
password: "{{ .psql | trim }}"
|
||||
username: "n8n"
|
||||
encryptionkey: "{{ .enc_pass | trim }}"
|
||||
runnertoken: "{{ .runner_token | trim }}"
|
||||
data:
|
||||
- secretKey: psql
|
||||
sourceRef:
|
||||
storeRef:
|
||||
name: vaultwarden-login
|
||||
kind: ClusterSecretStore
|
||||
remoteRef:
|
||||
conversionStrategy: Default
|
||||
decodingStrategy: None
|
||||
metadataPolicy: None
|
||||
key: 2a9deb39-ef22-433e-a1be-df1555625e22
|
||||
property: fields[13].value
|
||||
- secretKey: enc_pass
|
||||
sourceRef:
|
||||
storeRef:
|
||||
name: vaultwarden-login
|
||||
kind: ClusterSecretStore
|
||||
remoteRef:
|
||||
conversionStrategy: Default
|
||||
decodingStrategy: None
|
||||
metadataPolicy: None
|
||||
key: 18c92d73-9637-4419-8642-7f7b308460cb
|
||||
property: fields[0].value
|
||||
- secretKey: runner_token
|
||||
sourceRef:
|
||||
storeRef:
|
||||
name: vaultwarden-login
|
||||
kind: ClusterSecretStore
|
||||
remoteRef:
|
||||
conversionStrategy: Default
|
||||
decodingStrategy: None
|
||||
metadataPolicy: None
|
||||
key: 18c92d73-9637-4419-8642-7f7b308460cb
|
||||
property: fields[1].value
|
||||
28
k8s/apps/n8n/ingress.yaml
Normal file
28
k8s/apps/n8n/ingress.yaml
Normal file
@@ -0,0 +1,28 @@
|
||||
---
|
||||
apiVersion: networking.k8s.io/v1
|
||||
kind: Ingress
|
||||
metadata:
|
||||
name: n8n
|
||||
labels:
|
||||
app: n8n
|
||||
annotations:
|
||||
cert-manager.io/cluster-issuer: letsencrypt
|
||||
traefik.ingress.kubernetes.io/router.middlewares: kube-system-https-redirect@kubernetescrd
|
||||
traefik.ingress.kubernetes.io/router.tls: "true"
|
||||
spec:
|
||||
ingressClassName: traefik
|
||||
tls:
|
||||
- hosts:
|
||||
- n8n.hexor.cy
|
||||
secretName: n8n-tls
|
||||
rules:
|
||||
- host: n8n.hexor.cy
|
||||
http:
|
||||
paths:
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
backend:
|
||||
service:
|
||||
name: n8n
|
||||
port:
|
||||
number: 80
|
||||
25
k8s/apps/n8n/kustomization.yaml
Normal file
25
k8s/apps/n8n/kustomization.yaml
Normal file
@@ -0,0 +1,25 @@
|
||||
apiVersion: kustomize.config.k8s.io/v1beta1
|
||||
kind: Kustomization
|
||||
|
||||
resources:
|
||||
- external-secrets.yaml
|
||||
- storage.yaml
|
||||
- rbac.yaml
|
||||
- redis-deployment.yaml
|
||||
- redis-service.yaml
|
||||
- deployment-main.yaml
|
||||
- deployment-worker.yaml
|
||||
- service.yaml
|
||||
- ingress.yaml
|
||||
|
||||
helmCharts:
|
||||
- name: yacy
|
||||
repo: https://gt.hexor.cy/api/packages/ab/helm
|
||||
version: 0.1.2
|
||||
releaseName: yacy
|
||||
namespace: n8n
|
||||
valuesFile: values-yacy.yaml
|
||||
includeCRDs: true
|
||||
|
||||
commonLabels:
|
||||
app.kubernetes.io/name: n8n
|
||||
45
k8s/apps/n8n/rbac.yaml
Normal file
45
k8s/apps/n8n/rbac.yaml
Normal file
@@ -0,0 +1,45 @@
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: ServiceAccount
|
||||
metadata:
|
||||
name: n8n
|
||||
---
|
||||
apiVersion: rbac.authorization.k8s.io/v1
|
||||
kind: ClusterRole
|
||||
metadata:
|
||||
name: n8n-clusterrole
|
||||
rules:
|
||||
# Core API group ("")
|
||||
- apiGroups: [""]
|
||||
resources: ["*"]
|
||||
verbs: ["get", "list", "watch"]
|
||||
|
||||
# Common built-in API groups
|
||||
- apiGroups: ["apps", "batch", "autoscaling", "extensions", "policy"]
|
||||
resources: ["*"]
|
||||
verbs: ["get", "list", "watch"]
|
||||
|
||||
- apiGroups: ["networking.k8s.io", "rbac.authorization.k8s.io", "apiextensions.k8s.io"]
|
||||
resources: ["*"]
|
||||
verbs: ["get", "list", "watch"]
|
||||
|
||||
- apiGroups: ["coordination.k8s.io", "discovery.k8s.io", "events.k8s.io"]
|
||||
resources: ["*"]
|
||||
verbs: ["get", "list", "watch"]
|
||||
|
||||
- apiGroups: ["storage.k8s.io", "admissionregistration.k8s.io", "authentication.k8s.io", "authorization.k8s.io"]
|
||||
resources: ["*"]
|
||||
verbs: ["get", "list", "watch"]
|
||||
---
|
||||
apiVersion: rbac.authorization.k8s.io/v1
|
||||
kind: ClusterRoleBinding
|
||||
metadata:
|
||||
name: n8n-clusterrolebinding
|
||||
roleRef:
|
||||
apiGroup: rbac.authorization.k8s.io
|
||||
kind: ClusterRole
|
||||
name: n8n-clusterrole
|
||||
subjects:
|
||||
- kind: ServiceAccount
|
||||
name: n8n
|
||||
namespace: n8n
|
||||
57
k8s/apps/n8n/redis-deployment.yaml
Normal file
57
k8s/apps/n8n/redis-deployment.yaml
Normal file
@@ -0,0 +1,57 @@
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: n8n-redis
|
||||
labels:
|
||||
app: redis
|
||||
component: n8n
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: redis
|
||||
component: n8n
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: redis
|
||||
component: n8n
|
||||
spec:
|
||||
containers:
|
||||
- name: redis
|
||||
image: redis:7-alpine
|
||||
ports:
|
||||
- containerPort: 6379
|
||||
name: redis
|
||||
command:
|
||||
- redis-server
|
||||
- --appendonly
|
||||
- "yes"
|
||||
- --save
|
||||
- "900 1"
|
||||
volumeMounts:
|
||||
- name: redis-data
|
||||
mountPath: /data
|
||||
resources:
|
||||
requests:
|
||||
cpu: 50m
|
||||
memory: 64Mi
|
||||
limits:
|
||||
cpu: 200m
|
||||
memory: 256Mi
|
||||
livenessProbe:
|
||||
tcpSocket:
|
||||
port: 6379
|
||||
initialDelaySeconds: 30
|
||||
periodSeconds: 10
|
||||
readinessProbe:
|
||||
exec:
|
||||
command:
|
||||
- redis-cli
|
||||
- ping
|
||||
initialDelaySeconds: 5
|
||||
periodSeconds: 5
|
||||
volumes:
|
||||
- name: redis-data
|
||||
emptyDir: {}
|
||||
18
k8s/apps/n8n/redis-service.yaml
Normal file
18
k8s/apps/n8n/redis-service.yaml
Normal file
@@ -0,0 +1,18 @@
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: n8n-redis
|
||||
labels:
|
||||
app: redis
|
||||
component: n8n
|
||||
spec:
|
||||
selector:
|
||||
app: redis
|
||||
component: n8n
|
||||
ports:
|
||||
- name: redis
|
||||
port: 6379
|
||||
targetPort: 6379
|
||||
protocol: TCP
|
||||
type: ClusterIP
|
||||
17
k8s/apps/n8n/service.yaml
Normal file
17
k8s/apps/n8n/service.yaml
Normal file
@@ -0,0 +1,17 @@
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: n8n
|
||||
labels:
|
||||
app: n8n
|
||||
spec:
|
||||
selector:
|
||||
app: n8n
|
||||
component: main
|
||||
ports:
|
||||
- name: http
|
||||
port: 80
|
||||
targetPort: 5678
|
||||
protocol: TCP
|
||||
type: ClusterIP
|
||||
24
k8s/apps/n8n/storage.yaml
Normal file
24
k8s/apps/n8n/storage.yaml
Normal file
@@ -0,0 +1,24 @@
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: PersistentVolumeClaim
|
||||
metadata:
|
||||
name: n8n-data
|
||||
spec:
|
||||
accessModes:
|
||||
- ReadWriteMany
|
||||
storageClassName: longhorn
|
||||
resources:
|
||||
requests:
|
||||
storage: 10Gi
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: PersistentVolumeClaim
|
||||
metadata:
|
||||
name: n8n-tools
|
||||
spec:
|
||||
accessModes:
|
||||
- ReadWriteMany
|
||||
storageClassName: longhorn
|
||||
resources:
|
||||
requests:
|
||||
storage: 20Gi
|
||||
24
k8s/apps/n8n/values-yacy.yaml
Normal file
24
k8s/apps/n8n/values-yacy.yaml
Normal file
@@ -0,0 +1,24 @@
|
||||
nodeSelector:
|
||||
kubernetes.io/hostname: master.tail2fe2d.ts.net
|
||||
|
||||
resources:
|
||||
limits:
|
||||
memory: 2Gi
|
||||
requests:
|
||||
memory: 1Gi
|
||||
|
||||
persistence:
|
||||
enabled: true
|
||||
size: 10Gi
|
||||
|
||||
yacy:
|
||||
network:
|
||||
mode: "intranet"
|
||||
config:
|
||||
network.unit.bootstrap.seedlist: ""
|
||||
network.unit.remotecrawl: "false"
|
||||
network.unit.dhtredundancy.junior: "1"
|
||||
network.unit.dhtredundancy.senior: "1"
|
||||
index.receive.allow: "false"
|
||||
index.distribute.allow: "false"
|
||||
crawl.response.timeout: "10000"
|
||||
@@ -4,6 +4,7 @@ kind: Kustomization
|
||||
resources:
|
||||
- app.yaml
|
||||
- external-secrets.yaml
|
||||
- paperless-ai.yaml
|
||||
|
||||
helmCharts:
|
||||
- name: paperless-ngx
|
||||
|
||||
101
k8s/apps/paperless/paperless-ai.yaml
Normal file
101
k8s/apps/paperless/paperless-ai.yaml
Normal file
@@ -0,0 +1,101 @@
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: paperless-ai
|
||||
labels:
|
||||
app: paperless-ai
|
||||
spec:
|
||||
replicas: 1
|
||||
selector:
|
||||
matchLabels:
|
||||
app: paperless-ai
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: paperless-ai
|
||||
spec:
|
||||
nodeSelector:
|
||||
kubernetes.io/hostname: nas.homenet
|
||||
containers:
|
||||
- name: paperless-ai
|
||||
image: clusterzx/paperless-ai:latest
|
||||
imagePullPolicy: Always
|
||||
ports:
|
||||
- containerPort: 3000
|
||||
name: http
|
||||
env:
|
||||
- name: NODE_ENV
|
||||
value: production
|
||||
- name: PAPERLESS_AI_PORT
|
||||
value: "3000"
|
||||
resources:
|
||||
requests:
|
||||
memory: 512Mi
|
||||
cpu: 500m
|
||||
limits:
|
||||
memory: 1024Mi
|
||||
cpu: 2000m
|
||||
#livenessProbe:
|
||||
# httpGet:
|
||||
# path: /
|
||||
# port: 8000
|
||||
# initialDelaySeconds: 30
|
||||
# periodSeconds: 10
|
||||
#readinessProbe:
|
||||
# httpGet:
|
||||
# path: /
|
||||
# port: 8000
|
||||
# initialDelaySeconds: 5
|
||||
# periodSeconds: 5
|
||||
volumeMounts:
|
||||
- name: data
|
||||
mountPath: /app/data
|
||||
volumes:
|
||||
- name: data
|
||||
hostPath:
|
||||
path: /mnt/storage/Storage/k8s/paperless/ai-data
|
||||
type: DirectoryOrCreate
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: paperless-ai
|
||||
namespace: paperless
|
||||
labels:
|
||||
app: paperless-ai
|
||||
spec:
|
||||
type: ClusterIP
|
||||
ports:
|
||||
- port: 3000
|
||||
targetPort: 3000
|
||||
protocol: TCP
|
||||
name: http
|
||||
selector:
|
||||
app: paperless-ai
|
||||
---
|
||||
apiVersion: networking.k8s.io/v1
|
||||
kind: Ingress
|
||||
metadata:
|
||||
name: paperless-ai-ingress
|
||||
annotations:
|
||||
ingressClassName: traefik
|
||||
cert-manager.io/cluster-issuer: letsencrypt
|
||||
traefik.ingress.kubernetes.io/router.middlewares: kube-system-https-redirect@kubernetescrd
|
||||
acme.cert-manager.io/http01-edit-in-place: "true"
|
||||
spec:
|
||||
rules:
|
||||
- host: ai-docs.hexor.cy
|
||||
http:
|
||||
paths:
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
backend:
|
||||
service:
|
||||
name: paperless-ai
|
||||
port:
|
||||
number: 3000
|
||||
tls:
|
||||
- secretName: docs-tls
|
||||
hosts:
|
||||
- '*.hexor.cy'
|
||||
@@ -1,5 +1,5 @@
|
||||
image:
|
||||
tag: 2.19.3
|
||||
tag: 2.20.3
|
||||
resources:
|
||||
requests:
|
||||
memory: "1Gi"
|
||||
|
||||
@@ -1,212 +0,0 @@
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: ConfigMap
|
||||
metadata:
|
||||
name: pasarguard-scripts-ingress
|
||||
labels:
|
||||
app: pasarguard-node-ingress
|
||||
data:
|
||||
init-uuid-ingress.sh: |
|
||||
#!/bin/bash
|
||||
set -e
|
||||
echo "Started"
|
||||
# NODE_NAME is already set via environment variable
|
||||
NAMESPACE=$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace)
|
||||
|
||||
# Get DNS name from node label xray-public-address
|
||||
DNS_NAME=$(kubectl get node "${NODE_NAME}" -o jsonpath='{.metadata.labels.xray-public-address}')
|
||||
|
||||
if [ -z "${DNS_NAME}" ]; then
|
||||
echo "ERROR: Node ${NODE_NAME} does not have label 'xray-public-address'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Node: ${NODE_NAME}"
|
||||
echo "DNS Name from label: ${DNS_NAME}"
|
||||
|
||||
# Use DNS name for ConfigMap name to ensure uniqueness
|
||||
CONFIGMAP_NAME="node-uuid-ingress-${DNS_NAME//./-}"
|
||||
|
||||
echo "Checking ConfigMap: ${CONFIGMAP_NAME}"
|
||||
|
||||
# Check if ConfigMap exists and get UUID
|
||||
if kubectl get configmap "${CONFIGMAP_NAME}" -n "${NAMESPACE}" &>/dev/null; then
|
||||
echo "ConfigMap exists, reading UUID..."
|
||||
API_KEY=$(kubectl get configmap "${CONFIGMAP_NAME}" -n "${NAMESPACE}" -o jsonpath='{.data.API_KEY}')
|
||||
|
||||
if [ -z "${API_KEY}" ]; then
|
||||
echo "UUID not found in ConfigMap, generating new one..."
|
||||
API_KEY=$(cat /proc/sys/kernel/random/uuid)
|
||||
kubectl patch configmap "${CONFIGMAP_NAME}" -n "${NAMESPACE}" --type merge -p "{\"data\":{\"API_KEY\":\"${API_KEY}\"}}"
|
||||
else
|
||||
echo "Using existing UUID from ConfigMap"
|
||||
fi
|
||||
else
|
||||
echo "ConfigMap does not exist, creating new one..."
|
||||
API_KEY=$(cat /proc/sys/kernel/random/uuid)
|
||||
kubectl create configmap "${CONFIGMAP_NAME}" -n "${NAMESPACE}" \
|
||||
--from-literal=API_KEY="${API_KEY}" \
|
||||
--from-literal=NODE_NAME="${NODE_NAME}"
|
||||
fi
|
||||
|
||||
# Save UUID and node info to shared volume for the main container
|
||||
echo -n "${API_KEY}" > /shared/api-key
|
||||
echo -n "${NODE_NAME}" > /shared/node-name
|
||||
echo -n "${CONFIGMAP_NAME}" > /shared/configmap-name
|
||||
echo "UUID initialized: ${API_KEY}"
|
||||
echo "Node name: ${NODE_NAME}"
|
||||
echo "ConfigMap: ${CONFIGMAP_NAME}"
|
||||
|
||||
# Create Certificate for this node using DNS name from label
|
||||
CERT_NAME="pasarguard-node-ingress-${DNS_NAME//./-}"
|
||||
|
||||
echo "Creating Certificate: ${CERT_NAME} for ${DNS_NAME}"
|
||||
|
||||
# Check if Certificate already exists
|
||||
if ! kubectl get certificate "${CERT_NAME}" -n "${NAMESPACE}" &>/dev/null; then
|
||||
echo "Certificate does not exist, creating..."
|
||||
cat <<EOF | kubectl apply -f -
|
||||
apiVersion: cert-manager.io/v1
|
||||
kind: Certificate
|
||||
metadata:
|
||||
name: ${CERT_NAME}
|
||||
namespace: ${NAMESPACE}
|
||||
spec:
|
||||
secretName: ${CERT_NAME}-tls
|
||||
issuerRef:
|
||||
name: letsencrypt
|
||||
kind: ClusterIssuer
|
||||
dnsNames:
|
||||
- ${DNS_NAME}
|
||||
EOF
|
||||
else
|
||||
echo "Certificate already exists"
|
||||
fi
|
||||
|
||||
# Wait for certificate to be ready
|
||||
|
||||
echo "Waiting for certificate to be ready..."
|
||||
for i in {1..600}; do
|
||||
if kubectl get secret "${CERT_NAME}-tls" -n "${NAMESPACE}" &>/dev/null; then
|
||||
echo "Certificate secret is ready!"
|
||||
break
|
||||
fi
|
||||
echo "Waiting for certificate... ($i/600)"
|
||||
sleep 1
|
||||
done
|
||||
|
||||
if ! kubectl get secret "${CERT_NAME}-tls" -n "${NAMESPACE}" &>/dev/null; then
|
||||
echo "WARNING: Certificate secret not ready after 600 seconds"
|
||||
else
|
||||
# Extract certificate and key from secret to shared volume
|
||||
echo "Extracting certificate and key..."
|
||||
kubectl get secret "${CERT_NAME}-tls" -n "${NAMESPACE}" -o jsonpath='{.data.tls\.crt}' | base64 -d > /shared/tls.crt
|
||||
kubectl get secret "${CERT_NAME}-tls" -n "${NAMESPACE}" -o jsonpath='{.data.tls\.key}' | base64 -d > /shared/tls.key
|
||||
echo "Certificate and key extracted successfully."
|
||||
cat /shared/tls.crt
|
||||
fi
|
||||
|
||||
# Create ClusterIP Service for this node (pod selector based)
|
||||
NODE_SHORT_NAME="${NODE_NAME%%.*}"
|
||||
SERVICE_NAME="${NODE_SHORT_NAME}-ingress"
|
||||
|
||||
echo "Creating Service: ${SERVICE_NAME} for node ${NODE_NAME} (short: ${NODE_SHORT_NAME})"
|
||||
|
||||
# Create Service with pod selector including node name
|
||||
cat <<EOF | kubectl apply -f -
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: ${SERVICE_NAME}
|
||||
namespace: ${NAMESPACE}
|
||||
labels:
|
||||
app: pasarguard-node-ingress
|
||||
node: ${NODE_NAME}
|
||||
spec:
|
||||
type: ClusterIP
|
||||
selector:
|
||||
app: pasarguard-node-ingress
|
||||
node-name: ${NODE_SHORT_NAME}
|
||||
ports:
|
||||
- name: proxy
|
||||
port: 443
|
||||
protocol: TCP
|
||||
targetPort: 443
|
||||
- name: api
|
||||
port: 62050
|
||||
protocol: TCP
|
||||
targetPort: 62050
|
||||
EOF
|
||||
|
||||
echo "Service created: ${SERVICE_NAME}.${NAMESPACE}.svc.cluster.local"
|
||||
|
||||
# Create IngressRouteTCP for this DNS name with TLS passthrough
|
||||
INGRESS_NAME="pasarguard-tcp-${DNS_NAME//./-}"
|
||||
|
||||
echo "Creating IngressRouteTCP: ${INGRESS_NAME} for ${DNS_NAME}"
|
||||
|
||||
cat <<EOF | kubectl apply -f -
|
||||
apiVersion: traefik.io/v1alpha1
|
||||
kind: IngressRouteTCP
|
||||
metadata:
|
||||
name: ${INGRESS_NAME}
|
||||
namespace: ${NAMESPACE}
|
||||
labels:
|
||||
app: pasarguard-node-ingress
|
||||
node: ${NODE_NAME}
|
||||
spec:
|
||||
entryPoints:
|
||||
- websecure
|
||||
routes:
|
||||
- match: HostSNI(\`${DNS_NAME}\`)
|
||||
services:
|
||||
- name: ${SERVICE_NAME}
|
||||
port: 443
|
||||
tls:
|
||||
passthrough: true
|
||||
EOF
|
||||
|
||||
echo "IngressRouteTCP created: ${INGRESS_NAME}"
|
||||
echo "Traffic to ${DNS_NAME}:443 will be routed to ${SERVICE_NAME}:443"
|
||||
|
||||
# Create second IngressRouteTCP for API port 62051
|
||||
INGRESS_API_NAME="pasarguard-api-${DNS_NAME//./-}"
|
||||
|
||||
echo "Creating IngressRouteTCP for API: ${INGRESS_API_NAME} for ${DNS_NAME}:62051"
|
||||
|
||||
cat <<EOF | kubectl apply -f -
|
||||
apiVersion: traefik.io/v1alpha1
|
||||
kind: IngressRouteTCP
|
||||
metadata:
|
||||
name: ${INGRESS_API_NAME}
|
||||
namespace: ${NAMESPACE}
|
||||
labels:
|
||||
app: pasarguard-node-ingress
|
||||
node: ${NODE_NAME}
|
||||
spec:
|
||||
entryPoints:
|
||||
- pasarguard-api
|
||||
routes:
|
||||
- match: HostSNI(\`${DNS_NAME}\`)
|
||||
services:
|
||||
- name: ${SERVICE_NAME}
|
||||
port: 62050
|
||||
tls:
|
||||
passthrough: true
|
||||
EOF
|
||||
|
||||
echo "IngressRouteTCP API created: ${INGRESS_API_NAME}"
|
||||
echo "Traffic to ${DNS_NAME}:62051 will be routed to ${SERVICE_NAME}:62050"
|
||||
|
||||
pasarguard-start.sh: |
|
||||
#!/bin/sh
|
||||
# Read API_KEY from shared volume created by init container
|
||||
if [ -f /shared/api-key ]; then
|
||||
export API_KEY=$(cat /shared/api-key)
|
||||
echo "Loaded API_KEY from shared volume"
|
||||
else
|
||||
echo "WARNING: API_KEY file not found, using default"
|
||||
fi
|
||||
|
||||
cd /app
|
||||
exec ./main
|
||||
@@ -1,211 +0,0 @@
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: ServiceAccount
|
||||
metadata:
|
||||
name: pasarguard-node-ingress
|
||||
labels:
|
||||
app: pasarguard-node-ingress
|
||||
---
|
||||
apiVersion: rbac.authorization.k8s.io/v1
|
||||
kind: Role
|
||||
metadata:
|
||||
name: pasarguard-node-ingress-configmap
|
||||
labels:
|
||||
app: pasarguard-node-ingress
|
||||
rules:
|
||||
- apiGroups: [""]
|
||||
resources: ["configmaps"]
|
||||
verbs: ["get", "list", "create", "update", "patch"]
|
||||
- apiGroups: ["cert-manager.io"]
|
||||
resources: ["certificates"]
|
||||
verbs: ["get", "list", "create", "update", "patch", "delete"]
|
||||
- apiGroups: [""]
|
||||
resources: ["secrets"]
|
||||
verbs: ["get", "list"]
|
||||
- apiGroups: [""]
|
||||
resources: ["services", "endpoints"]
|
||||
verbs: ["get", "list", "create", "update", "patch", "delete"]
|
||||
- apiGroups: ["traefik.io", "traefik.containo.us"]
|
||||
resources: ["ingressroutetcps"]
|
||||
verbs: ["get", "list", "create", "update", "patch", "delete"]
|
||||
- apiGroups: [""]
|
||||
resources: ["pods"]
|
||||
verbs: ["get", "list", "patch", "update"]
|
||||
---
|
||||
apiVersion: rbac.authorization.k8s.io/v1
|
||||
kind: RoleBinding
|
||||
metadata:
|
||||
name: pasarguard-node-ingress-configmap
|
||||
labels:
|
||||
app: pasarguard-node-ingress
|
||||
roleRef:
|
||||
apiGroup: rbac.authorization.k8s.io
|
||||
kind: Role
|
||||
name: pasarguard-node-ingress-configmap
|
||||
subjects:
|
||||
- kind: ServiceAccount
|
||||
name: pasarguard-node-ingress
|
||||
---
|
||||
apiVersion: rbac.authorization.k8s.io/v1
|
||||
kind: ClusterRole
|
||||
metadata:
|
||||
name: pasarguard-node-ingress-reader
|
||||
labels:
|
||||
app: pasarguard-node-ingress
|
||||
rules:
|
||||
- apiGroups: [""]
|
||||
resources: ["nodes"]
|
||||
verbs: ["get", "list"]
|
||||
---
|
||||
apiVersion: rbac.authorization.k8s.io/v1
|
||||
kind: ClusterRoleBinding
|
||||
metadata:
|
||||
name: pasarguard-node-ingress-reader
|
||||
labels:
|
||||
app: pasarguard-node-ingress
|
||||
roleRef:
|
||||
apiGroup: rbac.authorization.k8s.io
|
||||
kind: ClusterRole
|
||||
name: pasarguard-node-ingress-reader
|
||||
subjects:
|
||||
- kind: ServiceAccount
|
||||
name: pasarguard-node-ingress
|
||||
namespace: pasarguard
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: DaemonSet
|
||||
metadata:
|
||||
name: pasarguard-node-ingress
|
||||
labels:
|
||||
app: pasarguard-node-ingress
|
||||
spec:
|
||||
selector:
|
||||
matchLabels:
|
||||
app: pasarguard-node-ingress
|
||||
revisionHistoryLimit: 3
|
||||
updateStrategy:
|
||||
type: RollingUpdate
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: pasarguard-node-ingress
|
||||
spec:
|
||||
serviceAccountName: pasarguard-node-ingress
|
||||
affinity:
|
||||
nodeAffinity:
|
||||
requiredDuringSchedulingIgnoredDuringExecution:
|
||||
nodeSelectorTerms:
|
||||
- matchExpressions:
|
||||
- key: xray-public-address
|
||||
operator: Exists
|
||||
initContainers:
|
||||
- name: label-pod
|
||||
image: bitnami/kubectl:latest
|
||||
env:
|
||||
- name: POD_NAME
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: metadata.name
|
||||
- name: POD_NAMESPACE
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: metadata.namespace
|
||||
- name: NODE_NAME
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: spec.nodeName
|
||||
command:
|
||||
- /bin/bash
|
||||
- -c
|
||||
- |
|
||||
# Add node label to pod
|
||||
NODE_SHORT=$(echo ${NODE_NAME} | cut -d. -f1)
|
||||
kubectl label pod ${POD_NAME} -n ${POD_NAMESPACE} node-name=${NODE_SHORT} --overwrite
|
||||
- name: init-uuid
|
||||
image: bitnami/kubectl:latest
|
||||
env:
|
||||
- name: GODEBUG
|
||||
value: "x509sha1=1"
|
||||
- name: NODE_NAME
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: spec.nodeName
|
||||
command:
|
||||
- /bin/bash
|
||||
- /scripts/init-uuid-ingress.sh
|
||||
volumeMounts:
|
||||
- name: shared-data
|
||||
mountPath: /shared
|
||||
- name: scripts
|
||||
mountPath: /scripts
|
||||
containers:
|
||||
- name: pasarguard-node
|
||||
image: 'pasarguard/node:v0.1.3'
|
||||
imagePullPolicy: Always
|
||||
command:
|
||||
- /bin/sh
|
||||
- /scripts/pasarguard-start.sh
|
||||
ports:
|
||||
- name: api
|
||||
containerPort: 62050
|
||||
protocol: TCP
|
||||
- name: proxy
|
||||
containerPort: 443
|
||||
protocol: TCP
|
||||
env:
|
||||
- name: NODE_NAME
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: spec.nodeName
|
||||
- name: NODE_HOST
|
||||
value: "0.0.0.0"
|
||||
- name: SERVICE_PORT
|
||||
value: "62050"
|
||||
- name: SERVICE_PROTOCOL
|
||||
value: "grpc"
|
||||
- name: DEBUG
|
||||
value: "true"
|
||||
- name: SSL_CERT_FILE
|
||||
value: "/shared/tls.crt"
|
||||
- name: SSL_KEY_FILE
|
||||
value: "/shared/tls.key"
|
||||
- name: XRAY_EXECUTABLE_PATH
|
||||
value: "/usr/local/bin/xray"
|
||||
- name: XRAY_ASSETS_PATH
|
||||
value: "/usr/local/share/xray"
|
||||
- name: API_KEY
|
||||
value: "change-this-to-a-secure-uuid"
|
||||
livenessProbe:
|
||||
tcpSocket:
|
||||
port: 62050
|
||||
initialDelaySeconds: 30
|
||||
periodSeconds: 10
|
||||
timeoutSeconds: 5
|
||||
failureThreshold: 3
|
||||
readinessProbe:
|
||||
tcpSocket:
|
||||
port: 62050
|
||||
initialDelaySeconds: 10
|
||||
periodSeconds: 5
|
||||
timeoutSeconds: 3
|
||||
failureThreshold: 3
|
||||
resources:
|
||||
requests:
|
||||
memory: "128Mi"
|
||||
cpu: "100m"
|
||||
limits:
|
||||
memory: "512Mi"
|
||||
cpu: "750m"
|
||||
volumeMounts:
|
||||
- name: shared-data
|
||||
mountPath: /shared
|
||||
readOnly: false
|
||||
- name: scripts
|
||||
mountPath: /scripts
|
||||
volumes:
|
||||
- name: shared-data
|
||||
emptyDir: {}
|
||||
- name: scripts
|
||||
configMap:
|
||||
name: pasarguard-scripts-ingress
|
||||
defaultMode: 0755
|
||||
@@ -113,7 +113,7 @@ spec:
|
||||
mountPath: /scripts
|
||||
containers:
|
||||
- name: pasarguard-node
|
||||
image: 'pasarguard/node:v0.1.3'
|
||||
image: 'pasarguard/node:v0.2.1'
|
||||
imagePullPolicy: Always
|
||||
command:
|
||||
- /bin/sh
|
||||
@@ -162,10 +162,10 @@ spec:
|
||||
resources:
|
||||
requests:
|
||||
memory: "128Mi"
|
||||
cpu: "100m"
|
||||
#cpu: "500m"
|
||||
limits:
|
||||
memory: "512Mi"
|
||||
cpu: "750m"
|
||||
#cpu: "1200m"
|
||||
volumeMounts:
|
||||
- name: shared-data
|
||||
mountPath: /shared
|
||||
@@ -205,7 +205,7 @@ spec:
|
||||
cpu: "50m"
|
||||
limits:
|
||||
memory: "128Mi"
|
||||
cpu: "150m"
|
||||
cpu: "500m"
|
||||
volumeMounts:
|
||||
- name: shared-data
|
||||
mountPath: /shared
|
||||
|
||||
@@ -9,6 +9,3 @@ resources:
|
||||
- ./certificate.yaml
|
||||
- ./configmap-scripts.yaml
|
||||
- ./servicemonitor.yaml
|
||||
- ./configmap-scripts-ingress.yaml
|
||||
# - ./daemonset-ingress.yaml
|
||||
# - ./traefik-pasarguard-entrypoint.yaml
|
||||
|
||||
@@ -1,66 +0,0 @@
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: traefik
|
||||
namespace: kube-system
|
||||
spec:
|
||||
template:
|
||||
spec:
|
||||
containers:
|
||||
- name: traefik
|
||||
args:
|
||||
- --entryPoints.metrics.address=:9100/tcp
|
||||
- --entryPoints.traefik.address=:8080/tcp
|
||||
- --entryPoints.web.address=:8000/tcp
|
||||
- --entryPoints.websecure.address=:8443/tcp
|
||||
- --entryPoints.pasarguard-api.address=:62051/tcp
|
||||
- --api.dashboard=true
|
||||
- --ping=true
|
||||
- --metrics.prometheus=true
|
||||
- --metrics.prometheus.entrypoint=metrics
|
||||
- --providers.kubernetescrd
|
||||
- --providers.kubernetescrd.allowEmptyServices=true
|
||||
- --providers.kubernetesingress
|
||||
- --providers.kubernetesingress.allowEmptyServices=true
|
||||
- --providers.kubernetesingress.ingressendpoint.publishedservice=kube-system/traefik
|
||||
- --entryPoints.websecure.http.tls=true
|
||||
- --log.level=INFO
|
||||
- --entryPoints.web.transport.respondingTimeouts.readTimeout=0s
|
||||
- --entryPoints.websecure.transport.respondingTimeouts.readTimeout=0s
|
||||
ports:
|
||||
- containerPort: 9100
|
||||
name: metrics
|
||||
protocol: TCP
|
||||
- containerPort: 8080
|
||||
name: traefik
|
||||
protocol: TCP
|
||||
- containerPort: 8000
|
||||
name: web
|
||||
protocol: TCP
|
||||
- containerPort: 8443
|
||||
name: websecure
|
||||
protocol: TCP
|
||||
- containerPort: 62051
|
||||
name: pasarguard-api
|
||||
protocol: TCP
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: traefik
|
||||
namespace: kube-system
|
||||
spec:
|
||||
ports:
|
||||
- name: web
|
||||
port: 80
|
||||
protocol: TCP
|
||||
targetPort: web
|
||||
- name: websecure
|
||||
port: 443
|
||||
protocol: TCP
|
||||
targetPort: websecure
|
||||
- name: pasarguard-api
|
||||
port: 62051
|
||||
protocol: TCP
|
||||
targetPort: pasarguard-api
|
||||
@@ -16,18 +16,18 @@ helmCharts:
|
||||
valuesFile: syncthing-master.yaml
|
||||
includeCRDs: true
|
||||
|
||||
- name: syncthing
|
||||
repo: https://k8s-home-lab.github.io/helm-charts
|
||||
version: 4.0.0
|
||||
releaseName: syncthing-khv
|
||||
namespace: syncthing
|
||||
valuesFile: syncthing-khv.yaml
|
||||
includeCRDs: true
|
||||
|
||||
- name: syncthing
|
||||
repo: https://k8s-home-lab.github.io/helm-charts
|
||||
version: 4.0.0
|
||||
releaseName: syncthing-nas
|
||||
namespace: syncthing
|
||||
valuesFile: syncthing-nas.yaml
|
||||
includeCRDs: true
|
||||
includeCRDs: true
|
||||
|
||||
# - name: syncthing
|
||||
# repo: https://k8s-home-lab.github.io/helm-charts
|
||||
# version: 4.0.0
|
||||
# releaseName: syncthing-khv
|
||||
# namespace: syncthing
|
||||
# valuesFile: syncthing-khv.yaml
|
||||
# includeCRDs: true
|
||||
@@ -1,4 +1,3 @@
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
@@ -23,7 +22,7 @@ spec:
|
||||
kubernetes.io/hostname: home.homenet
|
||||
containers:
|
||||
- name: desubot
|
||||
image: 'ultradesu/desubot:latest'
|
||||
image: "ultradesu/desubot:latest"
|
||||
imagePullPolicy: Always
|
||||
envFrom:
|
||||
- secretRef:
|
||||
@@ -32,11 +31,11 @@ spec:
|
||||
- name: RUST_LOG
|
||||
value: "info"
|
||||
volumeMounts:
|
||||
- mountPath: /storage
|
||||
name: storage
|
||||
- mountPath: /storage
|
||||
name: storage
|
||||
volumes:
|
||||
- name: storage
|
||||
nfs:
|
||||
server: nas.homenet
|
||||
path: /mnt/storage/Storage/k8s/desubot/
|
||||
readOnly: false
|
||||
persistentVolumeClaim:
|
||||
claimName: desubot-storage
|
||||
readOnly: false
|
||||
|
||||
|
||||
@@ -8,3 +8,5 @@ resources:
|
||||
- external-secrets.yaml
|
||||
- desubot.yaml
|
||||
- restart-job.yaml
|
||||
- storage.yaml
|
||||
|
||||
|
||||
12
k8s/apps/tg-bots/storage.yaml
Normal file
12
k8s/apps/tg-bots/storage.yaml
Normal file
@@ -0,0 +1,12 @@
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: PersistentVolumeClaim
|
||||
metadata:
|
||||
name: desubot-storage
|
||||
spec:
|
||||
accessModes:
|
||||
- ReadWriteMany
|
||||
storageClassName: nfs-csi
|
||||
resources:
|
||||
requests:
|
||||
storage: 200Gi
|
||||
31
k8s/apps/xandikos/external-secrets.yaml
Normal file
31
k8s/apps/xandikos/external-secrets.yaml
Normal file
@@ -0,0 +1,31 @@
|
||||
---
|
||||
apiVersion: external-secrets.io/v1
|
||||
kind: ExternalSecret
|
||||
metadata:
|
||||
name: mmdl-secrets
|
||||
spec:
|
||||
target:
|
||||
name: mmdl-secrets
|
||||
deletionPolicy: Delete
|
||||
template:
|
||||
type: Opaque
|
||||
data:
|
||||
DB_DIALECT: 'postgres'
|
||||
DB_HOST: psql.psql.svc
|
||||
DB_USER: mmdl
|
||||
DB_NAME: mmdl
|
||||
DB_PORT: "5432"
|
||||
DB_PASS: |-
|
||||
{{ .pg_pass }}
|
||||
AES_PASSWORD: |-
|
||||
{{ .pg_pass }}
|
||||
|
||||
data:
|
||||
- secretKey: pg_pass
|
||||
sourceRef:
|
||||
storeRef:
|
||||
name: vaultwarden-login
|
||||
kind: ClusterSecretStore
|
||||
remoteRef:
|
||||
key: 2a9deb39-ef22-433e-a1be-df1555625e22
|
||||
property: fields[12].value
|
||||
@@ -7,5 +7,5 @@ resources:
|
||||
- mmdl-deployment.yaml
|
||||
- mmdl-service.yaml
|
||||
- ingress.yaml
|
||||
|
||||
- external-secrets.yaml
|
||||
|
||||
|
||||
@@ -26,6 +26,9 @@ spec:
|
||||
- name: mmdl
|
||||
image: intriin/mmdl:latest
|
||||
imagePullPolicy: Always
|
||||
envFrom:
|
||||
- secretRef:
|
||||
name: mmdl-secrets
|
||||
env:
|
||||
- name: NEXTAUTH_URL
|
||||
value: "https://cal.hexor.cy"
|
||||
|
||||
@@ -47,3 +47,20 @@ spec:
|
||||
server: https://kubernetes.default.svc
|
||||
sourceRepos:
|
||||
- ssh://git@gt.hexor.cy:30022/ab/homelab.git
|
||||
|
||||
---
|
||||
apiVersion: argoproj.io/v1alpha1
|
||||
kind: AppProject
|
||||
metadata:
|
||||
name: desktop
|
||||
namespace: argocd
|
||||
spec:
|
||||
clusterResourceWhitelist:
|
||||
- group: '*'
|
||||
kind: '*'
|
||||
description: Hexor Home Lab Desktop Apps
|
||||
destinations:
|
||||
- namespace: '*'
|
||||
server: https://kubernetes.default.svc
|
||||
sourceRepos:
|
||||
- ssh://git@gt.hexor.cy:30022/ab/homelab.git
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
global:
|
||||
domain: ag.hexor.cy
|
||||
nodeSelector:
|
||||
nodeSelector: &nodeSelector
|
||||
kubernetes.io/hostname: master.tail2fe2d.ts.net
|
||||
logging:
|
||||
format: text
|
||||
@@ -55,15 +55,15 @@ configs:
|
||||
|
||||
controller:
|
||||
replicas: 1
|
||||
nodeSelector:
|
||||
kubernetes.io/hostname: master.tail2fe2d.ts.net
|
||||
nodeSelector:
|
||||
<<: *nodeSelector
|
||||
# Add resources (requests/limits), PDB etc. if needed
|
||||
|
||||
# Dex OIDC provider
|
||||
dex:
|
||||
replicas: 1
|
||||
nodeSelector:
|
||||
kubernetes.io/hostname: master.tail2fe2d.ts.net
|
||||
<<: *nodeSelector
|
||||
enabled: false
|
||||
|
||||
# Standard Redis disabled because Redis HA is enabled
|
||||
@@ -86,7 +86,7 @@ redis-ha:
|
||||
server:
|
||||
replicas: 1
|
||||
nodeSelector:
|
||||
kubernetes.io/hostname: master.tail2fe2d.ts.net
|
||||
<<: *nodeSelector
|
||||
ingress:
|
||||
enabled: false
|
||||
|
||||
@@ -99,8 +99,11 @@ server:
|
||||
# Repository Server
|
||||
repoServer:
|
||||
replicas: 1
|
||||
livenessProbe:
|
||||
timeoutSeconds: 10
|
||||
periodSeconds: 60
|
||||
nodeSelector:
|
||||
kubernetes.io/hostname: master.tail2fe2d.ts.net
|
||||
<<: *nodeSelector
|
||||
# Add resources (requests/limits), PDB etc. if needed
|
||||
|
||||
# ApplicationSet Controller
|
||||
@@ -108,7 +111,7 @@ applicationSet:
|
||||
enabled: true # Enabled by default
|
||||
replicas: 1
|
||||
nodeSelector:
|
||||
kubernetes.io/hostname: master.tail2fe2d.ts.net
|
||||
<<: *nodeSelector
|
||||
# Add resources (requests/limits), PDB etc. if needed
|
||||
|
||||
# Notifications Controller
|
||||
@@ -116,5 +119,5 @@ notifications:
|
||||
enabled: true # Enabled by default
|
||||
replicas: 1
|
||||
nodeSelector:
|
||||
kubernetes.io/hostname: master.tail2fe2d.ts.net
|
||||
<<: *nodeSelector
|
||||
# Add notifiers, triggers, templates configurations if needed
|
||||
|
||||
@@ -35,5 +35,6 @@ spec:
|
||||
key: secretKey
|
||||
selector:
|
||||
dnsZones:
|
||||
- "ps.hexor.cy"
|
||||
- "of.hexor.cy"
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
FROM debian:sid
|
||||
|
||||
ENV BW_CLI_VERSION=2025.5.0
|
||||
ENV BW_CLI_VERSION=2025.12.1
|
||||
|
||||
RUN apt update && \
|
||||
apt install -y wget unzip && \
|
||||
|
||||
@@ -37,15 +37,15 @@ spec:
|
||||
kubernetes.io/hostname: master.tail2fe2d.ts.net
|
||||
containers:
|
||||
- name: bitwarden-cli
|
||||
image: ultradesu/bitwarden-client:2025.5.0
|
||||
image: ultradesu/bitwarden-client:2025.12.1
|
||||
imagePullPolicy: Always
|
||||
resources:
|
||||
requests:
|
||||
memory: "128Mi"
|
||||
cpu: "100m"
|
||||
cpu: "300m"
|
||||
limits:
|
||||
memory: "512Mi"
|
||||
cpu: "500m"
|
||||
cpu: "1000m"
|
||||
env:
|
||||
- name: BW_HOST
|
||||
valueFrom:
|
||||
|
||||
@@ -3,5 +3,15 @@ kind: Kustomization
|
||||
|
||||
resources:
|
||||
- app.yaml
|
||||
- nfs-storage.yaml
|
||||
- coredns-internal-resolve.yaml
|
||||
|
||||
helmCharts:
|
||||
- name: csi-driver-nfs
|
||||
repo: https://raw.githubusercontent.com/kubernetes-csi/csi-driver-nfs/master/charts
|
||||
version: 4.12.0
|
||||
releaseName: csi-driver-nfs
|
||||
namespace: kube-system
|
||||
#valuesFile: values.yaml
|
||||
includeCRDs: true
|
||||
|
||||
|
||||
20
k8s/core/kube-system-custom/nfs-storage.yaml
Normal file
20
k8s/core/kube-system-custom/nfs-storage.yaml
Normal file
@@ -0,0 +1,20 @@
|
||||
---
|
||||
apiVersion: storage.k8s.io/v1
|
||||
kind: StorageClass
|
||||
metadata:
|
||||
name: nfs-csi
|
||||
provisioner: nfs.csi.k8s.io
|
||||
parameters:
|
||||
server: nas.homenet
|
||||
share: /mnt/storage/Storage/PVC
|
||||
reclaimPolicy: Retain
|
||||
volumeBindingMode: Immediate
|
||||
mountOptions:
|
||||
- nfsvers=4.1
|
||||
- rsize=1048576
|
||||
- wsize=1048576
|
||||
- timeo=14
|
||||
- intr
|
||||
- bg
|
||||
- soft
|
||||
- noatime
|
||||
21
k8s/core/longhorn/app.yaml
Normal file
21
k8s/core/longhorn/app.yaml
Normal file
@@ -0,0 +1,21 @@
|
||||
apiVersion: argoproj.io/v1alpha1
|
||||
kind: Application
|
||||
metadata:
|
||||
name: longhorn
|
||||
namespace: argocd
|
||||
spec:
|
||||
project: core
|
||||
destination:
|
||||
namespace: longhorn
|
||||
server: https://kubernetes.default.svc
|
||||
source:
|
||||
repoURL: ssh://git@gt.hexor.cy:30022/ab/homelab.git
|
||||
targetRevision: HEAD
|
||||
path: k8s/core/longhorn
|
||||
syncPolicy:
|
||||
automated:
|
||||
selfHeal: true
|
||||
prune: true
|
||||
syncOptions:
|
||||
- CreateNamespace=true
|
||||
|
||||
15
k8s/core/longhorn/kustomization.yaml
Normal file
15
k8s/core/longhorn/kustomization.yaml
Normal file
@@ -0,0 +1,15 @@
|
||||
apiVersion: kustomize.config.k8s.io/v1beta1
|
||||
kind: Kustomization
|
||||
|
||||
#resources:
|
||||
# - app.yaml
|
||||
|
||||
helmCharts:
|
||||
- name: longhorn
|
||||
repo: https://charts.longhorn.io
|
||||
version: 1.11.0
|
||||
releaseName: longhorn
|
||||
namespace: longhorn
|
||||
valuesFile: values.yaml
|
||||
includeCRDs: true
|
||||
|
||||
4
k8s/core/longhorn/values.yaml
Normal file
4
k8s/core/longhorn/values.yaml
Normal file
@@ -0,0 +1,4 @@
|
||||
longhornUI:
|
||||
replicas: 1
|
||||
persistence:
|
||||
reclaimPolicy: "Retain"
|
||||
@@ -123,6 +123,10 @@ spec:
|
||||
{{ .remnawave }}
|
||||
USER_umami: |-
|
||||
{{ .umami }}
|
||||
USER_mmdl: |-
|
||||
{{ .mmdl }}
|
||||
USER_n8n: |-
|
||||
{{ .n8n }}
|
||||
data:
|
||||
- secretKey: authentik
|
||||
sourceRef:
|
||||
@@ -245,3 +249,26 @@ spec:
|
||||
metadataPolicy: None
|
||||
key: 2a9deb39-ef22-433e-a1be-df1555625e22
|
||||
property: fields[11].value
|
||||
- secretKey: mmdl
|
||||
sourceRef:
|
||||
storeRef:
|
||||
name: vaultwarden-login
|
||||
kind: ClusterSecretStore
|
||||
remoteRef:
|
||||
conversionStrategy: Default
|
||||
decodingStrategy: None
|
||||
metadataPolicy: None
|
||||
key: 2a9deb39-ef22-433e-a1be-df1555625e22
|
||||
property: fields[12].value
|
||||
- secretKey: n8n
|
||||
sourceRef:
|
||||
storeRef:
|
||||
name: vaultwarden-login
|
||||
kind: ClusterSecretStore
|
||||
remoteRef:
|
||||
conversionStrategy: Default
|
||||
decodingStrategy: None
|
||||
metadataPolicy: None
|
||||
key: 2a9deb39-ef22-433e-a1be-df1555625e22
|
||||
property: fields[13].value
|
||||
|
||||
|
||||
46
k8s/core/prom-stack/alertmanager-config.yaml
Normal file
46
k8s/core/prom-stack/alertmanager-config.yaml
Normal file
@@ -0,0 +1,46 @@
|
||||
apiVersion: monitoring.coreos.com/v1alpha1
|
||||
kind: AlertmanagerConfig
|
||||
metadata:
|
||||
name: telegram-notifications
|
||||
namespace: prometheus
|
||||
labels:
|
||||
app: kube-prometheus-stack-alertmanager
|
||||
release: prometheus
|
||||
spec:
|
||||
route:
|
||||
groupBy: ['alertname', 'cluster', 'service']
|
||||
groupWait: 10s
|
||||
groupInterval: 5m
|
||||
repeatInterval: 12h
|
||||
receiver: telegram
|
||||
routes:
|
||||
- matchers:
|
||||
- name: alertname
|
||||
value: Watchdog
|
||||
matchType: "="
|
||||
receiver: 'null'
|
||||
receivers:
|
||||
- name: telegram
|
||||
telegramConfigs:
|
||||
- botToken:
|
||||
name: alertmanager-telegram-secret
|
||||
key: TELEGRAM_BOT_TOKEN
|
||||
chatID: 124317807
|
||||
parseMode: HTML
|
||||
sendResolved: true
|
||||
disableNotifications: false
|
||||
message: |
|
||||
{{ if eq .Status "firing" }}🔥 FIRING{{ else }}✅ RESOLVED{{ end }}
|
||||
|
||||
{{ range .Alerts }}
|
||||
📊 <b>{{ .Labels.alertname }}</b>
|
||||
{{ .Annotations.summary }}
|
||||
|
||||
{{ if .Annotations.node }}🖥 <b>Node:</b> <code>{{ .Annotations.node }}</code>{{ end }}
|
||||
{{ if .Annotations.pod }}📦 <b>Pod:</b> <code>{{ .Annotations.pod }}</code>{{ end }}
|
||||
{{ if .Annotations.namespace }}📁 <b>Namespace:</b> <code>{{ .Annotations.namespace }}</code>{{ end }}
|
||||
{{ if .Annotations.throttle_rate }}⚠️ <b>Throttling rate:</b> {{ .Annotations.throttle_rate }}{{ end }}
|
||||
|
||||
🔗 <a href="{{ .GeneratorURL }}">View in Grafana</a>
|
||||
{{ end }}
|
||||
- name: 'null'
|
||||
@@ -13,9 +13,6 @@ spec:
|
||||
targetRevision: HEAD
|
||||
path: k8s/core/prom-stack
|
||||
syncPolicy:
|
||||
automated:
|
||||
selfHeal: true
|
||||
prune: true
|
||||
syncOptions:
|
||||
- CreateNamespace=true
|
||||
- ServerSideApply=true
|
||||
|
||||
@@ -79,3 +79,83 @@ spec:
|
||||
key: 2a9deb39-ef22-433e-a1be-df1555625e22
|
||||
property: fields[2].value
|
||||
|
||||
---
|
||||
apiVersion: external-secrets.io/v1
|
||||
kind: ExternalSecret
|
||||
metadata:
|
||||
name: alertmanager-telegram
|
||||
spec:
|
||||
target:
|
||||
name: alertmanager-telegram-secret
|
||||
deletionPolicy: Delete
|
||||
template:
|
||||
type: Opaque
|
||||
data:
|
||||
TELEGRAM_BOT_TOKEN: |-
|
||||
{{ .bot_token }}
|
||||
TELEGRAM_CHAT_ID: |-
|
||||
{{ .chat_id }}
|
||||
data:
|
||||
- secretKey: bot_token
|
||||
sourceRef:
|
||||
storeRef:
|
||||
name: vaultwarden-login
|
||||
kind: ClusterSecretStore
|
||||
remoteRef:
|
||||
conversionStrategy: Default
|
||||
decodingStrategy: None
|
||||
metadataPolicy: None
|
||||
key: eca0fb0b-3939-40a8-890a-6294863e5a65
|
||||
property: fields[0].value
|
||||
- secretKey: chat_id
|
||||
sourceRef:
|
||||
storeRef:
|
||||
name: vaultwarden-login
|
||||
kind: ClusterSecretStore
|
||||
remoteRef:
|
||||
conversionStrategy: Default
|
||||
decodingStrategy: None
|
||||
metadataPolicy: None
|
||||
key: eca0fb0b-3939-40a8-890a-6294863e5a65
|
||||
property: fields[1].value
|
||||
|
||||
---
|
||||
apiVersion: external-secrets.io/v1
|
||||
kind: ExternalSecret
|
||||
metadata:
|
||||
name: grafana-telegram
|
||||
spec:
|
||||
target:
|
||||
name: grafana-telegram
|
||||
deletionPolicy: Delete
|
||||
template:
|
||||
type: Opaque
|
||||
data:
|
||||
bot-token: |-
|
||||
{{ .bot_token }}
|
||||
chat-id: |-
|
||||
{{ .chat_id }}
|
||||
data:
|
||||
- secretKey: bot_token
|
||||
sourceRef:
|
||||
storeRef:
|
||||
name: vaultwarden-login
|
||||
kind: ClusterSecretStore
|
||||
remoteRef:
|
||||
conversionStrategy: Default
|
||||
decodingStrategy: None
|
||||
metadataPolicy: None
|
||||
key: eca0fb0b-3939-40a8-890a-6294863e5a65
|
||||
property: fields[0].value
|
||||
- secretKey: chat_id
|
||||
sourceRef:
|
||||
storeRef:
|
||||
name: vaultwarden-login
|
||||
kind: ClusterSecretStore
|
||||
remoteRef:
|
||||
conversionStrategy: Default
|
||||
decodingStrategy: None
|
||||
metadataPolicy: None
|
||||
key: eca0fb0b-3939-40a8-890a-6294863e5a65
|
||||
property: fields[1].value
|
||||
|
||||
|
||||
382
k8s/core/prom-stack/grafana-alerting-configmap.yaml
Normal file
382
k8s/core/prom-stack/grafana-alerting-configmap.yaml
Normal file
@@ -0,0 +1,382 @@
|
||||
apiVersion: v1
|
||||
kind: ConfigMap
|
||||
metadata:
|
||||
name: grafana-alerting
|
||||
namespace: prometheus
|
||||
data:
|
||||
rules.yaml: |
|
||||
apiVersion: 1
|
||||
groups:
|
||||
- orgId: 1
|
||||
name: pasarguard_alerts
|
||||
folder: Kubernetes
|
||||
interval: 1m
|
||||
rules:
|
||||
- uid: pasarguard_cpu_throttling
|
||||
title: VPN CPU Throttle
|
||||
condition: B
|
||||
data:
|
||||
- refId: A
|
||||
relativeTimeRange:
|
||||
from: 600
|
||||
to: 0
|
||||
datasourceUid: P76F38748CEC837F0
|
||||
model:
|
||||
expr: 'rate(container_cpu_cfs_throttled_periods_total{container="pasarguard-node"}[5m])'
|
||||
refId: A
|
||||
intervalMs: 1000
|
||||
maxDataPoints: 43200
|
||||
- refId: B
|
||||
relativeTimeRange:
|
||||
from: 600
|
||||
to: 0
|
||||
datasourceUid: __expr__
|
||||
model:
|
||||
conditions:
|
||||
- evaluator:
|
||||
params:
|
||||
- 0.1
|
||||
type: gt
|
||||
operator:
|
||||
type: and
|
||||
query:
|
||||
params: []
|
||||
datasource:
|
||||
type: __expr__
|
||||
uid: __expr__
|
||||
expression: A
|
||||
reducer: min
|
||||
refId: B
|
||||
type: reduce
|
||||
noDataState: NoData
|
||||
execErrState: Alerting
|
||||
for: 5m
|
||||
annotations:
|
||||
pod: '{{ $labels.pod }}'
|
||||
node: '{{ $labels.node }}'
|
||||
namespace: '{{ $labels.namespace }}'
|
||||
throttle_rate: '{{ printf "%.2f" $values.A }}'
|
||||
summary: 'VPN node throttling CPU'
|
||||
labels:
|
||||
severity: warning
|
||||
|
||||
- orgId: 1
|
||||
name: kubernetes_alerts
|
||||
folder: Kubernetes
|
||||
interval: 2m
|
||||
rules:
|
||||
- uid: node_not_ready
|
||||
title: Kubernetes Node Not Ready
|
||||
condition: B
|
||||
data:
|
||||
- refId: A
|
||||
relativeTimeRange:
|
||||
from: 600
|
||||
to: 0
|
||||
datasourceUid: P76F38748CEC837F0
|
||||
model:
|
||||
expr: 'kube_node_status_condition{condition="Ready",status="false"}'
|
||||
refId: A
|
||||
intervalMs: 1000
|
||||
maxDataPoints: 43200
|
||||
- refId: B
|
||||
relativeTimeRange:
|
||||
from: 600
|
||||
to: 0
|
||||
datasourceUid: __expr__
|
||||
model:
|
||||
conditions:
|
||||
- evaluator:
|
||||
params:
|
||||
- 0
|
||||
type: gt
|
||||
operator:
|
||||
type: and
|
||||
query:
|
||||
params: []
|
||||
datasource:
|
||||
type: __expr__
|
||||
uid: __expr__
|
||||
expression: A
|
||||
reducer: min
|
||||
refId: B
|
||||
type: reduce
|
||||
noDataState: NoData
|
||||
execErrState: Alerting
|
||||
for: 10m
|
||||
annotations:
|
||||
node: '{{ $labels.node }}'
|
||||
condition: '{{ $labels.condition }}'
|
||||
summary: 'Kubernetes node is not ready'
|
||||
labels:
|
||||
severity: critical
|
||||
|
||||
- uid: node_high_memory_usage
|
||||
title: High Node Memory Usage
|
||||
condition: B
|
||||
data:
|
||||
- refId: A
|
||||
relativeTimeRange:
|
||||
from: 300
|
||||
to: 0
|
||||
datasourceUid: P76F38748CEC837F0
|
||||
model:
|
||||
expr: '(1 - (node_memory_MemAvailable_bytes / node_memory_MemTotal_bytes)) * 100'
|
||||
refId: A
|
||||
intervalMs: 1000
|
||||
maxDataPoints: 43200
|
||||
- refId: B
|
||||
relativeTimeRange:
|
||||
from: 300
|
||||
to: 0
|
||||
datasourceUid: __expr__
|
||||
model:
|
||||
conditions:
|
||||
- evaluator:
|
||||
params:
|
||||
- 80
|
||||
type: gt
|
||||
operator:
|
||||
type: and
|
||||
query:
|
||||
params: []
|
||||
datasource:
|
||||
type: __expr__
|
||||
uid: __expr__
|
||||
expression: A
|
||||
reducer: max
|
||||
refId: B
|
||||
type: reduce
|
||||
noDataState: NoData
|
||||
execErrState: Alerting
|
||||
for: 5m
|
||||
annotations:
|
||||
node: '{{ $labels.instance }}'
|
||||
memory_usage: '{{ printf "%.1f%%" $values.A }}'
|
||||
summary: 'Node memory usage is critically high'
|
||||
labels:
|
||||
severity: warning
|
||||
|
||||
- uid: node_high_cpu_usage
|
||||
title: High Node CPU Usage
|
||||
condition: B
|
||||
data:
|
||||
- refId: A
|
||||
relativeTimeRange:
|
||||
from: 300
|
||||
to: 0
|
||||
datasourceUid: P76F38748CEC837F0
|
||||
model:
|
||||
expr: '100 - (avg by(instance) (rate(node_cpu_seconds_total{mode="idle"}[5m])) * 100)'
|
||||
refId: A
|
||||
intervalMs: 1000
|
||||
maxDataPoints: 43200
|
||||
- refId: B
|
||||
relativeTimeRange:
|
||||
from: 300
|
||||
to: 0
|
||||
datasourceUid: __expr__
|
||||
model:
|
||||
conditions:
|
||||
- evaluator:
|
||||
params:
|
||||
- 80
|
||||
type: gt
|
||||
operator:
|
||||
type: and
|
||||
query:
|
||||
params: []
|
||||
datasource:
|
||||
type: __expr__
|
||||
uid: __expr__
|
||||
expression: A
|
||||
reducer: max
|
||||
refId: B
|
||||
type: reduce
|
||||
noDataState: NoData
|
||||
execErrState: Alerting
|
||||
for: 10m
|
||||
annotations:
|
||||
node: '{{ $labels.instance }}'
|
||||
cpu_usage: '{{ printf "%.1f%%" $values.A }}'
|
||||
summary: 'Node CPU usage is critically high'
|
||||
labels:
|
||||
severity: warning
|
||||
|
||||
- uid: node_high_disk_usage
|
||||
title: High Node Disk Usage
|
||||
condition: B
|
||||
data:
|
||||
- refId: A
|
||||
relativeTimeRange:
|
||||
from: 300
|
||||
to: 0
|
||||
datasourceUid: P76F38748CEC837F0
|
||||
model:
|
||||
expr: '(1 - (node_filesystem_avail_bytes{fstype=~"ext[234]|xfs|zfs|btrfs"} / node_filesystem_size_bytes)) * 100'
|
||||
refId: A
|
||||
intervalMs: 1000
|
||||
maxDataPoints: 43200
|
||||
- refId: B
|
||||
relativeTimeRange:
|
||||
from: 300
|
||||
to: 0
|
||||
datasourceUid: __expr__
|
||||
model:
|
||||
conditions:
|
||||
- evaluator:
|
||||
params:
|
||||
- 85
|
||||
type: gt
|
||||
operator:
|
||||
type: and
|
||||
query:
|
||||
params: []
|
||||
datasource:
|
||||
type: __expr__
|
||||
uid: __expr__
|
||||
expression: A
|
||||
reducer: max
|
||||
refId: B
|
||||
type: reduce
|
||||
noDataState: NoData
|
||||
execErrState: Alerting
|
||||
for: 5m
|
||||
annotations:
|
||||
node: '{{ $labels.instance }}'
|
||||
filesystem: '{{ $labels.mountpoint }}'
|
||||
disk_usage: '{{ printf "%.1f%%" $values.A }}'
|
||||
summary: 'Node disk usage is critically high'
|
||||
labels:
|
||||
severity: critical
|
||||
|
||||
- uid: node_load_average_high
|
||||
title: High Node Load Average
|
||||
condition: B
|
||||
data:
|
||||
- refId: A
|
||||
relativeTimeRange:
|
||||
from: 300
|
||||
to: 0
|
||||
datasourceUid: P76F38748CEC837F0
|
||||
model:
|
||||
expr: 'node_load5 / on(instance) group_left count by(instance)(node_cpu_seconds_total{mode="idle"})'
|
||||
refId: A
|
||||
intervalMs: 1000
|
||||
maxDataPoints: 43200
|
||||
- refId: B
|
||||
relativeTimeRange:
|
||||
from: 300
|
||||
to: 0
|
||||
datasourceUid: __expr__
|
||||
model:
|
||||
conditions:
|
||||
- evaluator:
|
||||
params:
|
||||
- 0.8
|
||||
type: gt
|
||||
operator:
|
||||
type: and
|
||||
query:
|
||||
params: []
|
||||
datasource:
|
||||
type: __expr__
|
||||
uid: __expr__
|
||||
expression: A
|
||||
reducer: max
|
||||
refId: B
|
||||
type: reduce
|
||||
noDataState: NoData
|
||||
execErrState: Alerting
|
||||
for: 5m
|
||||
annotations:
|
||||
node: '{{ $labels.instance }}'
|
||||
load_average: '{{ printf "%.2f" $values.A }}'
|
||||
summary: 'Node load average is high relative to CPU count'
|
||||
labels:
|
||||
severity: warning
|
||||
|
||||
- uid: node_exporter_down
|
||||
title: Node Exporter Down
|
||||
condition: B
|
||||
data:
|
||||
- refId: A
|
||||
relativeTimeRange:
|
||||
from: 300
|
||||
to: 0
|
||||
datasourceUid: P76F38748CEC837F0
|
||||
model:
|
||||
expr: 'up{job="node-exporter"}'
|
||||
refId: A
|
||||
intervalMs: 1000
|
||||
maxDataPoints: 43200
|
||||
- refId: B
|
||||
relativeTimeRange:
|
||||
from: 300
|
||||
to: 0
|
||||
datasourceUid: __expr__
|
||||
model:
|
||||
conditions:
|
||||
- evaluator:
|
||||
params:
|
||||
- 1
|
||||
type: lt
|
||||
operator:
|
||||
type: and
|
||||
query:
|
||||
params: []
|
||||
datasource:
|
||||
type: __expr__
|
||||
uid: __expr__
|
||||
expression: A
|
||||
reducer: min
|
||||
refId: B
|
||||
type: reduce
|
||||
noDataState: NoData
|
||||
execErrState: Alerting
|
||||
for: 2m
|
||||
annotations:
|
||||
node: '{{ $labels.instance }}'
|
||||
summary: 'Node exporter is down - unable to collect metrics'
|
||||
labels:
|
||||
severity: critical
|
||||
|
||||
contactpoints.yaml: |
|
||||
apiVersion: 1
|
||||
contactPoints:
|
||||
- orgId: 1
|
||||
name: telegram
|
||||
receivers:
|
||||
- uid: telegram_default
|
||||
type: telegram
|
||||
disableResolveMessage: false
|
||||
settings:
|
||||
bottoken: $TELEGRAM_BOT_TOKEN
|
||||
chatid: "124317807"
|
||||
message: |
|
||||
{{ if eq .Status "firing" }}🔥 FIRING{{ else }}✅ RESOLVED{{ end }}
|
||||
|
||||
{{ range .Alerts }}
|
||||
📊 <b>{{ .Labels.alertname }}</b>
|
||||
{{ .Annotations.summary }}
|
||||
|
||||
{{ if .Annotations.node }}🖥 <b>Node:</b> <code>{{ .Annotations.node }}</code>{{ end }}
|
||||
{{ if .Annotations.pod }}📦 <b>Pod:</b> <code>{{ .Annotations.pod }}</code>{{ end }}
|
||||
{{ if .Annotations.namespace }}📁 <b>Namespace:</b> <code>{{ .Annotations.namespace }}</code>{{ end }}
|
||||
{{ if .Annotations.throttle_rate }}⚠️ <b>Throttling rate:</b> {{ .Annotations.throttle_rate }}{{ end }}
|
||||
|
||||
🔗 <a href="{{ .GeneratorURL }}">View in Grafana</a>
|
||||
{{ end }}
|
||||
parse_mode: HTML
|
||||
|
||||
policies.yaml: |
|
||||
apiVersion: 1
|
||||
policies:
|
||||
- orgId: 1
|
||||
receiver: telegram
|
||||
group_by:
|
||||
- grafana_folder
|
||||
- alertname
|
||||
group_wait: 10s
|
||||
group_interval: 5m
|
||||
repeat_interval: 12h
|
||||
@@ -38,6 +38,10 @@ datasources:
|
||||
url: http://prometheus-kube-prometheus-prometheus.prometheus.svc:9090
|
||||
access: proxy
|
||||
isDefault: true
|
||||
- name: Loki
|
||||
type: loki
|
||||
url: http://loki-gateway.prometheus.svc:80
|
||||
access: proxy
|
||||
|
||||
ingress:
|
||||
enabled: true
|
||||
@@ -52,3 +56,30 @@ ingress:
|
||||
hosts:
|
||||
- '*.hexor.cy'
|
||||
|
||||
extraConfigmapMounts:
|
||||
- name: grafana-alerting-rules
|
||||
mountPath: /etc/grafana/provisioning/alerting/rules.yaml
|
||||
configMap: grafana-alerting
|
||||
subPath: rules.yaml
|
||||
readOnly: true
|
||||
- name: grafana-alerting-contactpoints
|
||||
mountPath: /etc/grafana/provisioning/alerting/contactpoints.yaml
|
||||
configMap: grafana-alerting
|
||||
subPath: contactpoints.yaml
|
||||
readOnly: true
|
||||
- name: grafana-alerting-policies
|
||||
mountPath: /etc/grafana/provisioning/alerting/policies.yaml
|
||||
configMap: grafana-alerting
|
||||
subPath: policies.yaml
|
||||
readOnly: true
|
||||
|
||||
envValueFrom:
|
||||
TELEGRAM_BOT_TOKEN:
|
||||
secretKeyRef:
|
||||
name: grafana-telegram
|
||||
key: bot-token
|
||||
TELEGRAM_CHAT_ID:
|
||||
secretKeyRef:
|
||||
name: grafana-telegram
|
||||
key: chat-id
|
||||
|
||||
|
||||
@@ -2,9 +2,10 @@ apiVersion: kustomize.config.k8s.io/v1beta1
|
||||
kind: Kustomization
|
||||
|
||||
resources:
|
||||
- app.yaml
|
||||
- persistentVolume.yaml
|
||||
- external-secrets.yaml
|
||||
- grafana-alerting-configmap.yaml
|
||||
- alertmanager-config.yaml
|
||||
|
||||
helmCharts:
|
||||
- name: kube-prometheus-stack
|
||||
@@ -23,3 +24,18 @@ helmCharts:
|
||||
valuesFile: grafana-values.yaml
|
||||
includeCRDs: true
|
||||
|
||||
- name: loki
|
||||
repo: https://grafana.github.io/helm-charts
|
||||
version: 6.29.0
|
||||
releaseName: loki
|
||||
namespace: prometheus
|
||||
valuesFile: loki-values.yaml
|
||||
includeCRDs: true
|
||||
|
||||
- name: promtail
|
||||
repo: https://grafana.github.io/helm-charts
|
||||
version: 6.16.6
|
||||
releaseName: promtail
|
||||
namespace: prometheus
|
||||
valuesFile: promtail-values.yaml
|
||||
|
||||
|
||||
75
k8s/core/prom-stack/loki-values.yaml
Normal file
75
k8s/core/prom-stack/loki-values.yaml
Normal file
@@ -0,0 +1,75 @@
|
||||
# Loki SingleBinary mode - optimal for homelab
|
||||
deploymentMode: SingleBinary
|
||||
|
||||
loki:
|
||||
auth_enabled: false
|
||||
commonConfig:
|
||||
replication_factor: 1
|
||||
path_prefix: /var/loki
|
||||
schemaConfig:
|
||||
configs:
|
||||
- from: 2024-01-01
|
||||
store: tsdb
|
||||
object_store: filesystem
|
||||
schema: v13
|
||||
index:
|
||||
prefix: index_
|
||||
period: 24h
|
||||
storage:
|
||||
type: filesystem
|
||||
filesystem:
|
||||
chunks_directory: /var/loki/chunks
|
||||
rules_directory: /var/loki/rules
|
||||
limits_config:
|
||||
reject_old_samples: false
|
||||
ingestion_rate_mb: 16
|
||||
ingestion_burst_size_mb: 32
|
||||
max_query_parallelism: 32
|
||||
volume_enabled: true
|
||||
|
||||
singleBinary:
|
||||
replicas: 1
|
||||
nodeSelector:
|
||||
kubernetes.io/hostname: master.tail2fe2d.ts.net
|
||||
persistence:
|
||||
enabled: true
|
||||
size: 50Gi
|
||||
storageClass: ""
|
||||
|
||||
# Disable distributed mode components
|
||||
read:
|
||||
replicas: 0
|
||||
write:
|
||||
replicas: 0
|
||||
backend:
|
||||
replicas: 0
|
||||
|
||||
# Disable memcached (not needed for SingleBinary)
|
||||
chunksCache:
|
||||
enabled: false
|
||||
resultsCache:
|
||||
enabled: false
|
||||
|
||||
# Gateway for Loki access
|
||||
gateway:
|
||||
enabled: true
|
||||
replicas: 1
|
||||
service:
|
||||
type: ClusterIP
|
||||
|
||||
# Disable tests and canary
|
||||
test:
|
||||
enabled: false
|
||||
lokiCanary:
|
||||
enabled: false
|
||||
|
||||
# Monitoring
|
||||
monitoring:
|
||||
dashboards:
|
||||
enabled: false
|
||||
rules:
|
||||
enabled: false
|
||||
serviceMonitor:
|
||||
enabled: false
|
||||
selfMonitoring:
|
||||
enabled: false
|
||||
@@ -1,6 +1,66 @@
|
||||
grafana:
|
||||
enabled: false
|
||||
|
||||
alertmanager:
|
||||
config:
|
||||
global:
|
||||
telegram_api_url: "https://api.telegram.org"
|
||||
route:
|
||||
group_by: ['alertname', 'cluster', 'service']
|
||||
group_wait: 10s
|
||||
group_interval: 10s
|
||||
repeat_interval: 12h
|
||||
receiver: 'telegram'
|
||||
receivers:
|
||||
- name: 'telegram'
|
||||
telegram_configs:
|
||||
- bot_token: '${TELEGRAM_BOT_TOKEN}'
|
||||
chat_id: ${TELEGRAM_CHAT_ID}
|
||||
parse_mode: 'HTML'
|
||||
message: |
|
||||
{{ range .Alerts }}
|
||||
<b>{{ .Labels.alertname }}</b>
|
||||
{{ if .Labels.severity }}<b>Severity:</b> {{ .Labels.severity }}{{ end }}
|
||||
<b>Status:</b> {{ .Status }}
|
||||
{{ if .Annotations.summary }}<b>Summary:</b> {{ .Annotations.summary }}{{ end }}
|
||||
{{ if .Annotations.description }}<b>Description:</b> {{ .Annotations.description }}{{ end }}
|
||||
{{ end }}
|
||||
|
||||
ingress:
|
||||
enabled: true
|
||||
ingressClassName: traefik
|
||||
annotations:
|
||||
cert-manager.io/cluster-issuer: letsencrypt
|
||||
traefik.ingress.kubernetes.io/router.middlewares: kube-system-https-redirect@kubernetescrd
|
||||
hosts:
|
||||
- prom.hexor.cy
|
||||
paths:
|
||||
- /alertmanager
|
||||
tls:
|
||||
- secretName: alertmanager-tls
|
||||
hosts:
|
||||
- prom.hexor.cy
|
||||
alertmanagerSpec:
|
||||
secrets:
|
||||
- alertmanager-telegram-secret
|
||||
externalUrl: https://prom.hexor.cy/alertmanager
|
||||
routePrefix: /alertmanager
|
||||
|
||||
prometheus:
|
||||
ingress:
|
||||
enabled: true
|
||||
ingressClassName: traefik
|
||||
annotations:
|
||||
cert-manager.io/cluster-issuer: letsencrypt
|
||||
traefik.ingress.kubernetes.io/router.middlewares: kube-system-https-redirect@kubernetescrd
|
||||
hosts:
|
||||
- prom.hexor.cy
|
||||
paths:
|
||||
- /
|
||||
tls:
|
||||
- secretName: prometheus-tls
|
||||
hosts:
|
||||
- prom.hexor.cy
|
||||
prometheusSpec:
|
||||
enableRemoteWriteReceiver: true
|
||||
additionalScrapeConfigs:
|
||||
|
||||
37
k8s/core/prom-stack/promtail-values.yaml
Normal file
37
k8s/core/prom-stack/promtail-values.yaml
Normal file
@@ -0,0 +1,37 @@
|
||||
# Promtail - log collection agent for all cluster pods
|
||||
config:
|
||||
clients:
|
||||
- url: http://loki-gateway.prometheus.svc:80/loki/api/v1/push
|
||||
|
||||
# DaemonSet - runs on every node
|
||||
daemonset:
|
||||
enabled: true
|
||||
|
||||
# Tolerations for master/control-plane nodes
|
||||
tolerations:
|
||||
- key: node-role.kubernetes.io/master
|
||||
operator: Exists
|
||||
effect: NoSchedule
|
||||
- key: node-role.kubernetes.io/control-plane
|
||||
operator: Exists
|
||||
effect: NoSchedule
|
||||
|
||||
# Init container to increase inotify limits
|
||||
initContainer:
|
||||
- name: init-inotify
|
||||
image: docker.io/busybox:1.36
|
||||
imagePullPolicy: IfNotPresent
|
||||
command:
|
||||
- sh
|
||||
- -c
|
||||
- sysctl -w fs.inotify.max_user_instances=512
|
||||
securityContext:
|
||||
privileged: true
|
||||
|
||||
resources:
|
||||
requests:
|
||||
cpu: 50m
|
||||
memory: 64Mi
|
||||
limits:
|
||||
cpu: 200m
|
||||
memory: 128Mi
|
||||
@@ -16,7 +16,7 @@ spec:
|
||||
serviceAccountName: system-upgrade
|
||||
upgrade:
|
||||
image: rancher/k3s-upgrade
|
||||
version: v1.34.2+k3s1
|
||||
version: v1.34.3+k3s1
|
||||
---
|
||||
# Agent plan
|
||||
apiVersion: upgrade.cattle.io/v1
|
||||
@@ -39,5 +39,5 @@ spec:
|
||||
serviceAccountName: system-upgrade
|
||||
upgrade:
|
||||
image: rancher/k3s-upgrade
|
||||
version: v1.34.2+k3s1
|
||||
version: v1.34.3+k3s1
|
||||
|
||||
|
||||
21
k8s/desktop/jellyfin/app.yaml
Normal file
21
k8s/desktop/jellyfin/app.yaml
Normal file
@@ -0,0 +1,21 @@
|
||||
apiVersion: argoproj.io/v1alpha1
|
||||
kind: Application
|
||||
metadata:
|
||||
name: jellyfin-uk
|
||||
namespace: argocd
|
||||
spec:
|
||||
project: apps
|
||||
destination:
|
||||
namespace: jellyfin-uk
|
||||
server: https://kubernetes.default.svc
|
||||
source:
|
||||
repoURL: ssh://git@gt.hexor.cy:30022/ab/homelab.git
|
||||
targetRevision: HEAD
|
||||
path: k8s/desktop/jellyfin
|
||||
syncPolicy:
|
||||
automated:
|
||||
selfHeal: true
|
||||
prune: true
|
||||
syncOptions:
|
||||
- CreateNamespace=true
|
||||
|
||||
16
k8s/desktop/jellyfin/kustomization.yaml
Normal file
16
k8s/desktop/jellyfin/kustomization.yaml
Normal file
@@ -0,0 +1,16 @@
|
||||
apiVersion: kustomize.config.k8s.io/v1beta1
|
||||
kind: Kustomization
|
||||
|
||||
resources:
|
||||
- app.yaml
|
||||
- qbittorent.yaml
|
||||
|
||||
helmCharts:
|
||||
- name: jellyfin
|
||||
repo: https://utkuozdemir.org/helm-charts
|
||||
version: 2.0.0
|
||||
releaseName: jellyfin
|
||||
namespace: jellyfin
|
||||
valuesFile: values.yaml
|
||||
includeCRDs: true
|
||||
|
||||
123
k8s/desktop/jellyfin/qbittorent.yaml
Normal file
123
k8s/desktop/jellyfin/qbittorent.yaml
Normal file
@@ -0,0 +1,123 @@
|
||||
---
|
||||
apiVersion: external-secrets.io/v1
|
||||
kind: ExternalSecret
|
||||
metadata:
|
||||
name: vpn-creds
|
||||
spec:
|
||||
target:
|
||||
name: vpn-creds
|
||||
deletionPolicy: Delete
|
||||
template:
|
||||
type: Opaque
|
||||
data:
|
||||
ss_link: |-
|
||||
{{ .ss_link }}
|
||||
data:
|
||||
- secretKey: ss_link
|
||||
sourceRef:
|
||||
storeRef:
|
||||
name: vaultwarden-login
|
||||
kind: ClusterSecretStore
|
||||
remoteRef:
|
||||
key: cfee6f62-fb06-4a4c-b6d8-92da4908c65a
|
||||
property: fields[0].value
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: qbittorrent
|
||||
labels:
|
||||
app: qbittorrent
|
||||
annotations:
|
||||
reloader.stakater.com/auto: "true"
|
||||
spec:
|
||||
selector:
|
||||
matchLabels:
|
||||
app: qbittorrent
|
||||
replicas: 1
|
||||
strategy:
|
||||
type: RollingUpdate
|
||||
rollingUpdate:
|
||||
maxSurge: 1
|
||||
maxUnavailable: 0
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: qbittorrent
|
||||
spec:
|
||||
nodeSelector:
|
||||
kubernetes.io/hostname: uk-desktop.tail2fe2d.ts.net
|
||||
tolerations:
|
||||
- key: workload
|
||||
operator: Equal
|
||||
value: desktop
|
||||
effect: NoSchedule
|
||||
volumes:
|
||||
- name: config
|
||||
hostPath:
|
||||
path: /k8s/qbt-config
|
||||
type: DirectoryOrCreate
|
||||
- name: media
|
||||
hostPath:
|
||||
path: /k8s/media/downloads
|
||||
type: DirectoryOrCreate
|
||||
containers:
|
||||
- name: qbittorrent
|
||||
image: 'linuxserver/qbittorrent:latest'
|
||||
ports:
|
||||
- name: http
|
||||
containerPort: 8080
|
||||
protocol: TCP
|
||||
volumeMounts:
|
||||
- name: config
|
||||
mountPath: /config
|
||||
- name: media
|
||||
mountPath: /downloads
|
||||
- name: shadowsocks-proxy
|
||||
image: teddysun/shadowsocks-rust:latest
|
||||
env:
|
||||
- name: SS_LINK
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: vpn-creds
|
||||
key: ss_link
|
||||
command: ["/bin/bash", "-c", "rm /etc/shadowsocks-rust/config.json && sslocal --server-url $SS_LINK --local-addr 127.0.0.1:8081 -U --protocol http"]
|
||||
resources:
|
||||
requests:
|
||||
memory: "64Mi"
|
||||
cpu: "300m"
|
||||
limits:
|
||||
memory: "128Mi"
|
||||
cpu: "300m"
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: qbittorrent
|
||||
spec:
|
||||
selector:
|
||||
app: qbittorrent
|
||||
ports:
|
||||
- protocol: TCP
|
||||
port: 80
|
||||
targetPort: 8080
|
||||
|
||||
---
|
||||
apiVersion: networking.k8s.io/v1
|
||||
kind: Ingress
|
||||
metadata:
|
||||
name: jf-local-ingress
|
||||
annotations:
|
||||
ingressClassName: traefik
|
||||
spec:
|
||||
rules:
|
||||
- host: tr.uk
|
||||
http:
|
||||
paths:
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
backend:
|
||||
service:
|
||||
name: qbittorrent
|
||||
port:
|
||||
number: 80
|
||||
41
k8s/desktop/jellyfin/values.yaml
Normal file
41
k8s/desktop/jellyfin/values.yaml
Normal file
@@ -0,0 +1,41 @@
|
||||
image:
|
||||
tag: 10.11.4
|
||||
resources:
|
||||
requests:
|
||||
memory: "2Gi"
|
||||
cpu: "1000m"
|
||||
limits:
|
||||
memory: "8Gi"
|
||||
cpu: "6000m"
|
||||
nodeSelector:
|
||||
kubernetes.io/hostname: uk-desktop.tail2fe2d.ts.net
|
||||
tolerations:
|
||||
- key: workload
|
||||
operator: Equal
|
||||
value: desktop
|
||||
effect: NoSchedule
|
||||
persistence:
|
||||
config:
|
||||
enabled: true
|
||||
isPvc: false
|
||||
customVolume:
|
||||
hostPath:
|
||||
path: /k8s/jellyfin
|
||||
type: DirectoryOrCreate
|
||||
data:
|
||||
enabled: true
|
||||
isPvc: false
|
||||
customVolume:
|
||||
hostPath:
|
||||
path: /k8s/media/downloads
|
||||
type: DirectoryOrCreate
|
||||
|
||||
ingress:
|
||||
enabled: true
|
||||
className: traefik
|
||||
hosts:
|
||||
- host: jf.uk
|
||||
paths:
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
|
||||
18
k8s/desktop/khm/app.yaml
Normal file
18
k8s/desktop/khm/app.yaml
Normal file
@@ -0,0 +1,18 @@
|
||||
apiVersion: argoproj.io/v1alpha1
|
||||
kind: Application
|
||||
metadata:
|
||||
name: khm-client
|
||||
namespace: argocd
|
||||
spec:
|
||||
project: desktop
|
||||
destination:
|
||||
namespace: khm
|
||||
server: https://kubernetes.default.svc
|
||||
source:
|
||||
repoURL: ssh://git@gt.hexor.cy:30022/ab/homelab.git
|
||||
targetRevision: HEAD
|
||||
path: k8s/desktop/khm
|
||||
syncPolicy:
|
||||
automated:
|
||||
selfHeal: true
|
||||
prune: true
|
||||
33
k8s/desktop/khm/external-secrets.yaml
Normal file
33
k8s/desktop/khm/external-secrets.yaml
Normal file
@@ -0,0 +1,33 @@
|
||||
---
|
||||
apiVersion: external-secrets.io/v1
|
||||
kind: ExternalSecret
|
||||
metadata:
|
||||
name: khm-client-creds
|
||||
spec:
|
||||
target:
|
||||
name: khm-client-creds
|
||||
deletionPolicy: Delete
|
||||
template:
|
||||
type: Opaque
|
||||
data:
|
||||
USERNAME: |-
|
||||
{{ .username }}
|
||||
PASSWORD: |-
|
||||
{{ .password }}
|
||||
data:
|
||||
- secretKey: username
|
||||
sourceRef:
|
||||
storeRef:
|
||||
name: vaultwarden-login
|
||||
kind: ClusterSecretStore
|
||||
remoteRef:
|
||||
key: 19c06480-0814-4d1f-aa80-710105989188
|
||||
property: login.username
|
||||
- secretKey: password
|
||||
sourceRef:
|
||||
storeRef:
|
||||
name: vaultwarden-login
|
||||
kind: ClusterSecretStore
|
||||
remoteRef:
|
||||
key: 19c06480-0814-4d1f-aa80-710105989188
|
||||
property: login.password
|
||||
69
k8s/desktop/khm/khm-client-cronjob.yaml
Normal file
69
k8s/desktop/khm/khm-client-cronjob.yaml
Normal file
@@ -0,0 +1,69 @@
|
||||
---
|
||||
apiVersion: batch/v1
|
||||
kind: CronJob
|
||||
metadata:
|
||||
name: khm-client
|
||||
labels:
|
||||
app: khm-client
|
||||
spec:
|
||||
schedule: "15 * * * *"
|
||||
concurrencyPolicy: Forbid
|
||||
successfulJobsHistoryLimit: 3
|
||||
failedJobsHistoryLimit: 3
|
||||
jobTemplate:
|
||||
spec:
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: khm-client
|
||||
spec:
|
||||
restartPolicy: OnFailure
|
||||
hostNetwork: true
|
||||
nodeSelector:
|
||||
node-role.kubernetes.io/desktop: ""
|
||||
tolerations:
|
||||
- key: workload
|
||||
operator: Equal
|
||||
value: desktop
|
||||
effect: NoSchedule
|
||||
containers:
|
||||
- name: khm-client
|
||||
image: 'ultradesu/khm:latest'
|
||||
imagePullPolicy: Always
|
||||
securityContext:
|
||||
privileged: false
|
||||
resources:
|
||||
requests:
|
||||
memory: "64Mi"
|
||||
cpu: "50m"
|
||||
limits:
|
||||
memory: "256Mi"
|
||||
cpu: "200m"
|
||||
command:
|
||||
- /bin/sh
|
||||
- -c
|
||||
- |
|
||||
/usr/local/bin/khm \
|
||||
--known-hosts /host-ssh/known_hosts \
|
||||
--host https://khm.hexor.cy \
|
||||
--flow=private \
|
||||
--basic-auth="${USERNAME}:${PASSWORD}" \
|
||||
--in-place
|
||||
env:
|
||||
- name: USERNAME
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: khm-client-creds
|
||||
key: USERNAME
|
||||
- name: PASSWORD
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: khm-client-creds
|
||||
key: PASSWORD
|
||||
volumeMounts:
|
||||
- name: known-hosts
|
||||
mountPath: /host-ssh/known_hosts
|
||||
volumes:
|
||||
- name: known-hosts
|
||||
hostPath:
|
||||
path: /home/ab/.ssh/known_hosts
|
||||
6
k8s/desktop/khm/kustomization.yaml
Normal file
6
k8s/desktop/khm/kustomization.yaml
Normal file
@@ -0,0 +1,6 @@
|
||||
apiVersion: kustomize.config.k8s.io/v1beta1
|
||||
kind: Kustomization
|
||||
|
||||
resources:
|
||||
- external-secrets.yaml
|
||||
- khm-client-cronjob.yaml
|
||||
563
k8s/games/minecraft/configmaps.yaml
Normal file
563
k8s/games/minecraft/configmaps.yaml
Normal file
@@ -0,0 +1,563 @@
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: ConfigMap
|
||||
metadata:
|
||||
name: nginx-config
|
||||
namespace: minecraft
|
||||
data:
|
||||
nginx.conf: |
|
||||
user nginx;
|
||||
worker_processes 1;
|
||||
error_log /var/log/nginx/error.log warn;
|
||||
pid /var/run/nginx.pid;
|
||||
|
||||
events {
|
||||
worker_connections 1024;
|
||||
}
|
||||
|
||||
http {
|
||||
include /etc/nginx/mime.types;
|
||||
default_type application/octet-stream;
|
||||
|
||||
server {
|
||||
listen 80;
|
||||
|
||||
# Custom 502 error page with auto-refresh
|
||||
error_page 502 /502.html;
|
||||
location = /502.html {
|
||||
internal;
|
||||
return 200 '<!DOCTYPE html><html><head><meta charset="utf-8"><title>Server Loading</title><style>body{font-family:Arial,sans-serif;text-align:center;margin-top:100px;background:#f0f0f0}h1{color:#333}p{color:#666;font-size:18px}</style></head><body><h1>Server is loading probably...</h1><p>Please wait a moment and try refreshing the page.</p><script>setTimeout(function(){window.location.reload();}, 10000);</script></body></html>';
|
||||
add_header Content-Type text/html;
|
||||
}
|
||||
|
||||
# Main location - proxy to Minecraft Dynmap
|
||||
location / {
|
||||
# Proxy configuration for Dynmap server
|
||||
proxy_pass http://localhost:8123;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
|
||||
# Inject user authentication meta tag into HTML head
|
||||
sub_filter '<head>' '<head><meta name="remote-user" content="$http_x_authentik_username">';
|
||||
|
||||
# Replace default Dynmap title with custom server name
|
||||
sub_filter 'Minecraft Dynamic Map' "Hexor's MC server";
|
||||
|
||||
# Inject custom script before closing body tag
|
||||
sub_filter "</body>" '<script src="/clients/inject.js"></script></body>';
|
||||
|
||||
# Apply sub_filter replacements globally (not just once)
|
||||
sub_filter_once off;
|
||||
}
|
||||
|
||||
# Serve inject.js and .ps1 scripts inline (no forced download)
|
||||
location = /clients/inject.js {
|
||||
alias /mc/clients/inject.js;
|
||||
default_type application/javascript;
|
||||
}
|
||||
|
||||
location ~ ^/clients/(.+\.ps1)$ {
|
||||
alias /mc/clients/$1;
|
||||
default_type text/plain;
|
||||
}
|
||||
|
||||
# Static file serving for client downloads
|
||||
location /clients/ {
|
||||
alias /mc/clients/;
|
||||
sendfile on; # Enable efficient file serving
|
||||
add_header Content-Disposition "attachment"; # Force download
|
||||
autoindex on; # Enable directory listing
|
||||
gzip off; # Disable compression for downloads
|
||||
chunked_transfer_encoding off; # Disable chunked encoding
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: ConfigMap
|
||||
metadata:
|
||||
name: client-scripts
|
||||
namespace: minecraft
|
||||
data:
|
||||
inject.js: |
|
||||
(function() {
|
||||
function getUsername() {
|
||||
var metas = document.querySelectorAll("meta");
|
||||
for (var i = 0; i < metas.length; i++) {
|
||||
if (metas[i].getAttribute("name") === "remote-user") {
|
||||
return metas[i].getAttribute("content");
|
||||
}
|
||||
}
|
||||
var jwt = document.cookie.split("; ").find(function(row) {
|
||||
return row.startsWith("authentik_session=");
|
||||
});
|
||||
if (jwt) {
|
||||
try {
|
||||
var token = jwt.split("=")[1];
|
||||
var payload = JSON.parse(atob(token.split(".")[1]));
|
||||
return payload.sub || payload.username || "web-user";
|
||||
} catch(e) {}
|
||||
}
|
||||
return "web-user";
|
||||
}
|
||||
|
||||
var username = getUsername();
|
||||
console.log("Username found:", username);
|
||||
|
||||
if (username && username !== "web-user" &&
|
||||
window.location.search.indexOf("playername=") === -1) {
|
||||
var currentUrl = new URL(window.location.href);
|
||||
currentUrl.searchParams.set("playername", username);
|
||||
console.log("Redirecting to:", currentUrl.href);
|
||||
window.location.href = currentUrl.href;
|
||||
}
|
||||
|
||||
document.addEventListener("DOMContentLoaded", function() {
|
||||
// User block
|
||||
var userBlock = document.createElement("div");
|
||||
userBlock.style.cssText = "background-color:#CEC6CB;color:black;padding:8px;text-align:center;font-size:medium;border-radius:4px;position:absolute;top:10px;right:150px;max-width:200px;";
|
||||
userBlock.innerHTML = "Logged in as: <b>" + username + "</b>";
|
||||
document.body.appendChild(userBlock);
|
||||
|
||||
// Info block
|
||||
var infoBlock = document.createElement("p");
|
||||
infoBlock.style.cssText = "background-color:#CEC6CB;color:black;padding:10px;text-align:center;font-size:large;display:inline-block;border-radius:4px;position:absolute;top:10px;left:150px;";
|
||||
infoBlock.innerHTML = 'GEYMERSKIY SOYUZ Server<br>'
|
||||
+ 'Get <a href="https://github.com/PrismLauncher/PrismLauncher/releases/tag/8.4">Prism Launcher</a> '
|
||||
+ 'and <a href="/clients/1.12.2.zip">client.zip</a> for this server. '
|
||||
+ 'Server address <b>minecraft.hexor.cy:30565</b><br>'
|
||||
+ 'Requires <a href="https://www.java.com/en/download/manual.jsp">Java 8</a><br><br>'
|
||||
+ '<a href="#" id="showInstallBtn" style="color:black;text-decoration:underline;">Windows Install Script</a>';
|
||||
document.body.appendChild(infoBlock);
|
||||
|
||||
// Modal
|
||||
var modal = document.createElement("div");
|
||||
modal.id = "installModal";
|
||||
modal.style.cssText = "display:none;position:fixed;z-index:1000;left:0;top:0;width:100%;height:100%;background-color:rgba(0,0,0,0.5);";
|
||||
modal.innerHTML = '<div style="background-color:#CEC6CB;margin:15% auto;padding:10px;border-radius:4px;width:70%;max-width:500px;text-align:center;color:black;font-size:large;">'
|
||||
+ '<h3 style="margin-top:0;color:black;">Windows Installation</h3>'
|
||||
+ '<p style="color:black;">Copy and paste this command into PowerShell:</p>'
|
||||
+ '<textarea id="scriptCommand" readonly style="width:90%;height:60px;font-family:monospace;padding:8px;border:1px solid #888;border-radius:4px;resize:none;background-color:white;color:black;"></textarea>'
|
||||
+ '<br><br>'
|
||||
+ '<button id="copyButton" style="background-color:#CEC6CB;color:black;padding:10px 15px;border:1px solid #888;border-radius:4px;cursor:pointer;margin-right:10px;font-size:large;">Copy</button>'
|
||||
+ '<button id="closeButton" style="background-color:#CEC6CB;color:black;padding:10px 15px;border:1px solid #888;border-radius:4px;cursor:pointer;font-size:large;">Close</button>'
|
||||
+ '</div>';
|
||||
document.body.appendChild(modal);
|
||||
|
||||
// Generate PowerShell command with username
|
||||
// Build the PowerShell one-liner shown to the user: download the install
// script to %TEMP%, run it with the detected username, then delete it.
// Returns the full command as a string.
function buildPsCommand() {
    var d = "$"; // literal dollar sign assembled at runtime (kept out of the source text)
    var q = "'";
    // PowerShell escapes ' inside a single-quoted string by doubling it.
    // Without this, a quote in the username would break — or inject into —
    // the generated command line.
    var safeUser = String(username).replace(/'/g, "''");
    return d + 'f="' + d + 'env:TEMP\\mc-install.ps1"; iwr -useb https://minecraft.hexor.cy/clients/win-install.ps1 -OutFile '
        + d + 'f; powershell -ExecutionPolicy Bypass -File ' + d + 'f -Username ' + q + safeUser + q + '; Remove-Item ' + d + 'f';
}
|
||||
|
||||
document.getElementById("showInstallBtn").addEventListener("click", function(e) {
|
||||
e.preventDefault();
|
||||
modal.style.display = "block";
|
||||
document.getElementById("scriptCommand").value = buildPsCommand();
|
||||
});
|
||||
|
||||
document.getElementById("closeButton").addEventListener("click", function() {
|
||||
modal.style.display = "none";
|
||||
});
|
||||
|
||||
document.getElementById("copyButton").addEventListener("click", function() {
|
||||
var textarea = document.getElementById("scriptCommand");
|
||||
textarea.select();
|
||||
textarea.setSelectionRange(0, 99999);
|
||||
document.execCommand("copy");
|
||||
var btn = document.getElementById("copyButton");
|
||||
btn.style.borderColor = "#4CAF50";
|
||||
setTimeout(function() { btn.style.borderColor = "#888"; }, 2000);
|
||||
});
|
||||
|
||||
modal.addEventListener("click", function(event) {
|
||||
if (event.target === modal) {
|
||||
modal.style.display = "none";
|
||||
}
|
||||
});
|
||||
});
|
||||
})();
|
||||
|
||||
win-install.ps1: |
|
||||
# Game Setup Script for PrismLauncher and Minecraft Client
|
||||
# This script downloads and configures PrismLauncher with Hexor client
|
||||
|
||||
param(
|
||||
[string]$Username = "",
|
||||
[string]$InstallPath = "$env:USERPROFILE\Games\PrismLauncher"
|
||||
)
|
||||
|
||||
# Enable TLS 1.2 for downloads
|
||||
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
|
||||
|
||||
# Function to write colored log messages
|
||||
# Write a log line of the form "[LEVEL] message" with the level tag colored.
# Known levels: INFO (blue), WARN (yellow), ERROR (red), SUCCESS (green).
# Previously an unrecognized level matched no switch branch and printed
# nothing at all; now it still prints the tag, in the default console color.
function Write-Log {
    param(
        [string]$Message,
        [string]$Level = "INFO"
    )

    # Single level -> color table instead of four copy-pasted switch branches.
    $levelColors = @{
        "INFO"    = [ConsoleColor]::Blue
        "WARN"    = [ConsoleColor]::Yellow
        "ERROR"   = [ConsoleColor]::Red
        "SUCCESS" = [ConsoleColor]::Green
    }

    Write-Host "[" -NoNewline
    if ($levelColors.ContainsKey($Level)) {
        Write-Host $Level -ForegroundColor $levelColors[$Level] -NoNewline
    } else {
        # Fallback for unknown levels: still show the tag rather than dropping the line.
        Write-Host $Level -NoNewline
    }
    Write-Host "] $Message"
}
|
||||
|
||||
# Function to download file with progress
|
||||
# Download $Url to $OutputPath using System.Net.WebClient.
# Any existing file at $OutputPath is deleted first so a stale or partial
# previous download cannot be mistaken for a fresh one.
# Returns $true on success, $false on any failure.
function Download-File {
    param(
        [string]$Url,
        [string]$OutputPath
    )

    try {
        Write-Log "Downloading from: $Url"
        Write-Log "Saving to: $OutputPath"

        if (Test-Path $OutputPath) {
            Remove-Item $OutputPath -Force
            Write-Log "Removed existing file to avoid corruption" "WARN"
        }

        # WebClient is IDisposable; release its resources even when
        # DownloadFile throws (previously it was never disposed).
        $webClient = New-Object System.Net.WebClient
        try {
            $webClient.DownloadFile($Url, $OutputPath)
        }
        finally {
            $webClient.Dispose()
        }

        if (Test-Path $OutputPath) {
            $fileSize = (Get-Item $OutputPath).Length
            Write-Log "Download completed successfully ($fileSize bytes)" "SUCCESS"
            return $true
        } else {
            Write-Log "Download failed - file not found" "ERROR"
            return $false
        }
    }
    catch {
        Write-Log "Download failed: $($_.Exception.Message)" "ERROR"
        return $false
    }
}
|
||||
|
||||
# Function to extract ZIP archive
|
||||
# Extract a ZIP archive to $DestinationPath.
# Validates the archive first (exists, plausible size, readable ZIP central
# directory), clears previous destination contents (keeping .zip files) so
# ExtractToDirectory cannot collide with leftovers, then extracts.
# Returns $true on success, $false otherwise; an archive that fails to
# extract is deleted so the next run re-downloads it.
function Extract-Archive {
    param(
        [string]$ArchivePath,
        [string]$DestinationPath
    )

    try {
        Write-Log "Extracting archive: $ArchivePath"
        Write-Log "Destination: $DestinationPath"

        if (!(Test-Path $ArchivePath)) {
            Write-Log "Archive file not found: $ArchivePath" "ERROR"
            return $false
        }

        # A ZIP under 1000 bytes cannot hold a real payload; treat as corrupt.
        $fileSize = (Get-Item $ArchivePath).Length
        if ($fileSize -lt 1000) {
            Write-Log "Archive file too small ($fileSize bytes), probably corrupted" "ERROR"
            return $false
        }

        if (!(Test-Path $DestinationPath)) {
            New-Item -ItemType Directory -Path $DestinationPath -Force | Out-Null
        }

        # Clear existing non-ZIP files and all subdirectories so extraction
        # starts from a clean destination.
        $existingFiles = Get-ChildItem -Path $DestinationPath -File | Where-Object { $_.Extension -ne ".zip" }
        $existingDirs = Get-ChildItem -Path $DestinationPath -Directory

        if ($existingFiles.Count -gt 0 -or $existingDirs.Count -gt 0) {
            Write-Log "Clearing existing files in destination directory" "WARN"
            $existingFiles | Remove-Item -Force -ErrorAction SilentlyContinue
            $existingDirs | Remove-Item -Recurse -Force -ErrorAction SilentlyContinue
        }

        Add-Type -AssemblyName System.IO.Compression.FileSystem

        # Validate the ZIP by opening its central directory. Dispose in a
        # finally block: previously a throw from .Entries would leave the
        # file handle open, which could then block the Remove-Item cleanup
        # in the outer catch below.
        $zip = $null
        try {
            $zip = [System.IO.Compression.ZipFile]::OpenRead($ArchivePath)
            $entryCount = $zip.Entries.Count
            Write-Log "ZIP file validated ($entryCount entries)" "SUCCESS"
        }
        catch {
            Write-Log "ZIP file validation failed: $($_.Exception.Message)" "ERROR"
            return $false
        }
        finally {
            if ($zip) { $zip.Dispose() }
        }

        [System.IO.Compression.ZipFile]::ExtractToDirectory($ArchivePath, $DestinationPath)

        Write-Log "Archive extracted successfully" "SUCCESS"
        return $true
    }
    catch {
        Write-Log "Archive extraction failed: $($_.Exception.Message)" "ERROR"

        # Drop the (likely corrupt) archive so the next attempt re-downloads it.
        if (Test-Path $ArchivePath) {
            Write-Log "Removing potentially corrupted archive file" "WARN"
            Remove-Item $ArchivePath -Force -ErrorAction SilentlyContinue
        }

        return $false
    }
}
|
||||
|
||||
# Function to check Java installation
|
||||
# Check whether Java is available on PATH.
# Returns $true when java is detected, or when the user explicitly chooses
# to continue without it; $false when detection fails and the user declines.
function Check-JavaInstallation {
    $detected = $false
    try {
        # `java -version` writes to stderr, hence the 2>&1 merge.
        $javaVersion = java -version 2>&1 | Select-String "version"
        if ($javaVersion) {
            Write-Log "Java is installed: $($javaVersion.ToString().Trim())" "SUCCESS"
            $detected = $true
        }
    }
    catch {
        # java is not on PATH (CommandNotFound) or failed to start.
        $detected = $false
    }

    if ($detected) {
        return $true
    }

    # Previously this prompt only ran when `java` threw; if java ran but
    # produced no "version" line the function fell through and returned
    # $null silently, making the caller abort with no explanation. Now
    # every not-detected case reaches the prompt.
    Write-Log "Java is not installed or not in PATH" "WARN"
    Write-Log "Please download Java from: https://www.java.com/en/download/manual.jsp" "WARN"
    Write-Log "Look for 'Windows Offline (64-bit)' version" "WARN"

    $response = Read-Host "Do you want to continue without Java? (y/n)"
    if ($response -eq 'y' -or $response -eq 'Y') {
        Write-Log "Continuing without Java verification" "WARN"
        return $true
    } else {
        Write-Log "Installation cancelled. Please install Java first." "ERROR"
        return $false
    }
}
|
||||
|
||||
# Function to generate a random hex string (UUID-like without dashes)
|
||||
# Generate a cryptographically random lowercase hex string of exactly
# $Length characters (default 32, i.e. a UUID without dashes).
# Fixes: odd lengths previously computed a non-integral byte count
# ($Length / 2) and produced the wrong output length; the RNG is now
# also disposed even if GetBytes throws.
function New-RandomHexId {
    param([int]$Length = 32)

    # One random byte yields two hex characters: round up, trim later.
    $byteCount = [int][math]::Ceiling($Length / 2.0)
    $bytes = New-Object byte[] $byteCount
    $rng = [System.Security.Cryptography.RandomNumberGenerator]::Create()
    try {
        $rng.GetBytes($bytes)
    }
    finally {
        # RandomNumberGenerator is IDisposable.
        $rng.Dispose()
    }
    $hex = ($bytes | ForEach-Object { $_.ToString("x2") }) -join ''
    return $hex.Substring(0, $Length)
}
|
||||
|
||||
# Function to create accounts.json file with offline account
|
||||
# Write an accounts.json into $TargetPath containing one active "Offline"
# account named $PlayerName plus an empty MSA stub entry.
# NOTE(review): the field layout mirrors a formatVersion 3 accounts.json —
# verify against the PrismLauncher version actually shipped.
# Returns $true on success, $false on failure.
function Create-AccountsFile {
    param(
        [string]$TargetPath,
        [string]$PlayerName
    )

    $accountsPath = Join-Path $TargetPath "accounts.json"
    $profileId = New-RandomHexId -Length 32
    $clientToken = New-RandomHexId -Length 32
    # Issued-at timestamp as whole UNIX seconds. [DateTimeOffset] is
    # culture-independent; the previous
    # [int][double]::Parse((Get-Date -UFormat %s)) failed on locales that
    # use ',' as the decimal separator.
    $iat = [DateTimeOffset]::UtcNow.ToUnixTimeSeconds()

    $accountsObj = @{
        accounts = @(
            @{
                entitlement = @{
                    canPlayMinecraft = $true
                    ownsMinecraft = $true
                }
                "msa-client-id" = ""
                type = "MSA"
            },
            @{
                active = $true
                entitlement = @{
                    canPlayMinecraft = $true
                    ownsMinecraft = $true
                }
                profile = @{
                    capes = @()
                    id = $profileId
                    name = $PlayerName
                    skin = @{
                        id = ""
                        url = ""
                        variant = ""
                    }
                }
                type = "Offline"
                ygg = @{
                    extra = @{
                        clientToken = $clientToken
                        userName = $PlayerName
                    }
                    iat = $iat
                    token = "0"
                }
            }
        )
        formatVersion = 3
    }

    try {
        $accountsObj | ConvertTo-Json -Depth 10 | Out-File -FilePath $accountsPath -Encoding UTF8
        Write-Log "Created accounts.json at: $accountsPath" "SUCCESS"
        Write-Log "Player name: $PlayerName" "SUCCESS"
        return $true
    }
    catch {
        Write-Log "Failed to create accounts.json: $($_.Exception.Message)" "ERROR"
        return $false
    }
}
|
||||
|
||||
# ---------------------------------------------------------------------
# Main installation process
# ---------------------------------------------------------------------
# Flow: prepare install dir -> download + extract PrismLauncher ->
# write accounts.json (optional) -> check Java -> download + extract the
# Minecraft client into the launcher's instances dir -> cleanup ->
# optionally start the launcher.
Write-Log "Starting PrismLauncher and Minecraft client setup"
Write-Log "Player name: $Username"
Write-Log "Installation path: $InstallPath"

# Create the installation directory; if it already exists, clear its
# contents but keep any ZIP files (cached downloads survive a re-run).
if (Test-Path $InstallPath) {
    Write-Log "Installation directory exists, clearing contents" "WARN"
    $existingFiles = Get-ChildItem -Path $InstallPath -File | Where-Object { $_.Extension -ne ".zip" }
    $existingDirs = Get-ChildItem -Path $InstallPath -Directory

    $existingFiles | Remove-Item -Force -ErrorAction SilentlyContinue
    $existingDirs | Remove-Item -Recurse -Force -ErrorAction SilentlyContinue
} else {
    New-Item -ItemType Directory -Path $InstallPath -Force | Out-Null
    Write-Log "Created installation directory: $InstallPath"
}

# Step 1: Download the portable PrismLauncher build from GitHub.
Write-Log "Step 1: Downloading PrismLauncher..."
$launcherUrl = "https://github.com/PrismLauncher/PrismLauncher/releases/download/8.4/PrismLauncher-Windows-MSVC-Portable-8.4.zip"
$launcherZip = Join-Path $InstallPath "PrismLauncher-8.4.zip"
# Initial guess; corrected after extraction below.
$launcherExtractPath = Join-Path $InstallPath "PrismLauncher-Windows-MSVC-Portable-8.4"

if (!(Download-File -Url $launcherUrl -OutputPath $launcherZip)) {
    Write-Log "Failed to download PrismLauncher. Exiting." "ERROR"
    exit 1
}

# Step 2: Extract PrismLauncher into the install root.
Write-Log "Step 2: Extracting PrismLauncher..."
if (!(Extract-Archive -ArchivePath $launcherZip -DestinationPath $InstallPath)) {
    Write-Log "Failed to extract PrismLauncher. Exiting." "ERROR"
    exit 1
}

# The archive may contain a top-level folder, or extract its files
# directly into the destination; locate the launcher either way.
$extractedDirs = Get-ChildItem -Path $InstallPath -Directory | Where-Object { $_.Name -like "*PrismLauncher*" }
if ($extractedDirs.Count -gt 0) {
    $launcherExtractPath = $extractedDirs[0].FullName
    Write-Log "Found PrismLauncher directory: $launcherExtractPath" "SUCCESS"
} else {
    Write-Log "Could not find extracted PrismLauncher directory. Checking for direct extraction..." "WARN"
    $prismExe = Join-Path $InstallPath "prismlauncher.exe"
    if (Test-Path $prismExe) {
        $launcherExtractPath = $InstallPath
        Write-Log "PrismLauncher extracted directly to: $launcherExtractPath" "SUCCESS"
    } else {
        Write-Log "Failed to locate PrismLauncher files. Exiting." "ERROR"
        exit 1
    }
}

# Step 3: Create accounts.json (skipped for the anonymous "web-user").
if ($Username -and $Username -ne "web-user") {
    Write-Log "Step 3: Creating accounts configuration for $Username..."
    if (!(Create-AccountsFile -TargetPath $InstallPath -PlayerName $Username)) {
        Write-Log "Failed to create accounts.json. Exiting." "ERROR"
        exit 1
    }
} else {
    Write-Log "Step 3: No username provided, skipping accounts.json creation" "WARN"
}

# Step 4: Check Java installation (the user may choose to continue without).
Write-Log "Step 4: Checking Java installation..."
if (!(Check-JavaInstallation)) {
    Write-Log "Java check failed. Exiting." "ERROR"
    exit 1
}

# Step 5: Download the Minecraft client bundle.
Write-Log "Step 5: Downloading Minecraft client..."
$minecraftUrl = "https://minecraft.hexor.cy/clients/1.12.2.zip"
$minecraftZip = Join-Path $InstallPath "minecraft-1.12.2.zip"
$instancesPath = Join-Path $launcherExtractPath "instances"
$hexorPath = Join-Path $instancesPath "Hexor"

if (!(Download-File -Url $minecraftUrl -OutputPath $minecraftZip)) {
    Write-Log "Failed to download Minecraft client. Exiting." "ERROR"
    exit 1
}

# Step 6: Create the instance directory tree and extract the client into it.
Write-Log "Step 6: Setting up Minecraft client..."
if (!(Test-Path $instancesPath)) {
    New-Item -ItemType Directory -Path $instancesPath -Force | Out-Null
    Write-Log "Created instances directory"
}

if (!(Test-Path $hexorPath)) {
    New-Item -ItemType Directory -Path $hexorPath -Force | Out-Null
    Write-Log "Created Hexor instance directory"
}

if (!(Extract-Archive -ArchivePath $minecraftZip -DestinationPath $hexorPath)) {
    Write-Log "Failed to extract Minecraft client. Exiting." "ERROR"
    exit 1
}

# Step 7: Cleanup temporary files.
Write-Log "Step 7: Cleaning up temporary files..."
try {
    # Remove-Item errors are non-terminating by default, so without
    # -ErrorAction Stop the catch below could never run and a failed
    # cleanup would pass silently.
    Remove-Item $launcherZip -Force -ErrorAction Stop
    Remove-Item $minecraftZip -Force -ErrorAction Stop
    Write-Log "Temporary files cleaned up" "SUCCESS"
}
catch {
    Write-Log "Could not remove temporary files: $($_.Exception.Message)" "WARN"
}

# Final summary.
Write-Log "=== INSTALLATION COMPLETED SUCCESSFULLY ===" "SUCCESS"
Write-Log "PrismLauncher location: $launcherExtractPath" "SUCCESS"
Write-Log "Executable: $(Join-Path $launcherExtractPath 'prismlauncher.exe')" "SUCCESS"
Write-Log "Minecraft client installed in: $hexorPath" "SUCCESS"
if ($Username -and $Username -ne "web-user") {
    Write-Log "Player name configured: $Username" "SUCCESS"
    Write-Log "Accounts file: $(Join-Path $InstallPath 'accounts.json')" "SUCCESS"
}
Write-Log ""
Write-Log "You can now run PrismLauncher and the Hexor instance should be available!"
Write-Log "If Java was not installed, please download it from: https://www.java.com/en/download/manual.jsp"

# Offer to start the launcher right away.
$launchResponse = Read-Host "Do you want to launch PrismLauncher now? (y/n)"
if ($launchResponse -eq 'y' -or $launchResponse -eq 'Y') {
    $launcherExe = Join-Path $launcherExtractPath "prismlauncher.exe"
    if (Test-Path $launcherExe) {
        Write-Log "Launching PrismLauncher..." "INFO"
        Start-Process -FilePath $launcherExe -WorkingDirectory $launcherExtractPath
    } else {
        Write-Log "Launcher executable not found!" "ERROR"
    }
}
|
||||
|
||||
|
||||
@@ -1,69 +1,3 @@
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: ConfigMap
|
||||
metadata:
|
||||
name: nginx-config
|
||||
namespace: minecraft
|
||||
data:
|
||||
nginx.conf: |
|
||||
user nginx;
|
||||
worker_processes 1;
|
||||
error_log /var/log/nginx/error.log warn;
|
||||
pid /var/run/nginx.pid;
|
||||
|
||||
events {
|
||||
worker_connections 1024;
|
||||
}
|
||||
|
||||
http {
|
||||
include /etc/nginx/mime.types;
|
||||
default_type application/octet-stream;
|
||||
|
||||
server {
|
||||
listen 80;
|
||||
|
||||
# Custom 502 error page with auto-refresh
|
||||
error_page 502 /502.html;
|
||||
location = /502.html {
|
||||
internal;
|
||||
return 200 '<!DOCTYPE html><html><head><meta charset="utf-8"><title>Server Loading</title><style>body{font-family:Arial,sans-serif;text-align:center;margin-top:100px;background:#f0f0f0}h1{color:#333}p{color:#666;font-size:18px}</style></head><body><h1>Server is loading probably...</h1><p>Please wait a moment and try refreshing the page.</p><script>setTimeout(function(){window.location.reload();}, 10000);</script></body></html>';
|
||||
add_header Content-Type text/html;
|
||||
}
|
||||
|
||||
# Main location - proxy to Minecraft Dynmap
|
||||
location / {
|
||||
# Proxy configuration for Dynmap server
|
||||
proxy_pass http://localhost:8123;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
|
||||
# Inject user authentication meta tag into HTML head
|
||||
sub_filter '<head>' '<head><meta name="remote-user" content="$http_x_authentik_username">';
|
||||
|
||||
# Replace default Dynmap title with custom server name
|
||||
sub_filter 'Minecraft Dynamic Map' "Hexor's MC server";
|
||||
|
||||
# Inject all custom content before closing body tag (single replacement)
|
||||
sub_filter "</body>" '<script>function getUsername(){var headers=document.querySelectorAll("meta");for(var i=0;i<headers.length;i++){if(headers[i].getAttribute("name")==="remote-user"){return headers[i].getAttribute("content");}}var jwt=document.cookie.split("; ").find(row=>row.startsWith("authentik_session="));if(jwt){try{var token=jwt.split("=")[1];var payload=JSON.parse(atob(token.split(".")[1]));return payload.sub||payload.username||"web-user";}catch(e){}}return "web-user";}var username=getUsername();console.log("Username found:", username);if(username && username!=="web-user" && window.location.search.indexOf("playername=")===-1){var currentUrl=new URL(window.location.href);currentUrl.searchParams.set("playername",username);console.log("Redirecting to:", currentUrl.href);window.location.href=currentUrl.href;}document.addEventListener("DOMContentLoaded",function(){var userBlock=document.createElement("div");userBlock.style.cssText="background-color: #CEC6CB; color: black; padding: 8px; text-align: center; font-size: medium; border-radius: 4px; position: absolute; top: 10px; right: 150px; max-width: 200px;";userBlock.innerHTML="Logged in as: <b>"+username+"</b>";document.body.appendChild(userBlock);});</script><p style="background-color: #CEC6CB; color: black; padding: 10px 10px; text-align: center; font-size: large; text-decoration: none; display: inline-block; border-radius: 4px; position: absolute; top: 10px; left: 150px;">GEYMERSKIY SOYUZ Server <br>Get <a href="https://github.com/PrismLauncher/PrismLauncher/releases/tag/8.4" >Prism Launcher</a> and <a href="/clients/1.12.2.zip" >client.zip</a> for this server. 
Server address <b>minecraft.hexor.cy:30565</b><br><br><a href="#" onclick="showInstallModal(); return false;" style="color: black; text-decoration: underline;">Windows Install Script</a></p><div id="installModal" style="display: none; position: fixed; z-index: 1000; left: 0; top: 0; width: 100%; height: 100%; background-color: rgba(0,0,0,0.5);"><div style="background-color: #CEC6CB; margin: 15% auto; padding: 10px; border-radius: 4px; width: 70%; max-width: 500px; text-align: center; color: black; font-size: large;"><h3 style="margin-top: 0; color: black;">Windows Installation</h3><p style="color: black;">Copy and paste this command into PowerShell:</p><textarea id="scriptCommand" readonly style="width: 90%; height: 60px; font-family: monospace; padding: 8px; border: 1px solid #888; border-radius: 4px; resize: none; background-color: white; color: black;">iwr -useb https://minecraft.hexor.cy/clients/win-install.ps1 | iex</textarea><br><br><button id="copyButton" onclick="copyToClipboard()" style="background-color: #CEC6CB; color: black; padding: 10px 15px; border: 1px solid #888; border-radius: 4px; cursor: pointer; margin-right: 10px; font-size: large; text-decoration: none;">Copy</button><button onclick="closeInstallModal()" style="background-color: #CEC6CB; color: black; padding: 10px 15px; border: 1px solid #888; border-radius: 4px; cursor: pointer; font-size: large; text-decoration: none;">Close</button></div></div><script>function showInstallModal() { document.getElementById("installModal").style.display = "block"; } function closeInstallModal() { document.getElementById("installModal").style.display = "none"; } function copyToClipboard() { var textarea = document.getElementById("scriptCommand"); textarea.select(); textarea.setSelectionRange(0, 99999); if (document.execCommand("copy")) { var button = document.getElementById("copyButton"); button.style.borderColor = "#4CAF50"; setTimeout(function() { button.style.borderColor = "#888"; }, 2000); } } 
window.onclick = function(event) { var modal = document.getElementById("installModal"); if (event.target == modal) { closeInstallModal(); } }</script></body>';
|
||||
|
||||
# Apply sub_filter replacements globally (not just once)
|
||||
sub_filter_once off;
|
||||
}
|
||||
|
||||
# Static file serving for client downloads
|
||||
location /clients/ {
|
||||
alias /mc/clients/;
|
||||
sendfile on; # Enable efficient file serving
|
||||
add_header Content-Disposition "attachment"; # Force download
|
||||
autoindex on; # Enable directory listing
|
||||
gzip off; # Disable compression for downloads
|
||||
chunked_transfer_encoding off; # Disable chunked encoding
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
@@ -94,6 +28,9 @@ spec:
|
||||
- name: nginx-config
|
||||
configMap:
|
||||
name: nginx-config
|
||||
- name: client-scripts
|
||||
configMap:
|
||||
name: client-scripts
|
||||
|
||||
terminationGracePeriodSeconds: 10
|
||||
containers:
|
||||
@@ -142,6 +79,22 @@ spec:
|
||||
- name: webstatus-mod
|
||||
containerPort: 8080
|
||||
protocol: TCP
|
||||
livenessProbe:
|
||||
httpGet:
|
||||
path: /
|
||||
port: 8123
|
||||
initialDelaySeconds: 120
|
||||
periodSeconds: 30
|
||||
timeoutSeconds: 5
|
||||
failureThreshold: 3
|
||||
readinessProbe:
|
||||
httpGet:
|
||||
path: /
|
||||
port: 8123
|
||||
initialDelaySeconds: 60
|
||||
periodSeconds: 10
|
||||
timeoutSeconds: 5
|
||||
failureThreshold: 3
|
||||
volumeMounts:
|
||||
- name: storage
|
||||
mountPath: /mc
|
||||
@@ -162,6 +115,12 @@ spec:
|
||||
subPath: nginx.conf
|
||||
- name: storage
|
||||
mountPath: /mc
|
||||
- name: client-scripts
|
||||
mountPath: /mc/clients/win-install.ps1
|
||||
subPath: win-install.ps1
|
||||
- name: client-scripts
|
||||
mountPath: /mc/clients/inject.js
|
||||
subPath: inject.js
|
||||
|
||||
---
|
||||
apiVersion: v1
|
||||
|
||||
@@ -5,4 +5,4 @@ resources:
|
||||
- app.yaml
|
||||
- deployments.yaml
|
||||
- services.yaml
|
||||
#- ingress.yaml
|
||||
- configmaps.yaml
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
|
||||
data "authentik_flow" "default_authorization_flow" {
|
||||
slug = var.default_authorization_flow
|
||||
}
|
||||
@@ -299,7 +298,7 @@ resource "authentik_outpost" "outposts" {
|
||||
kubernetes_ingress_class_name = null
|
||||
kubernetes_disabled_components = []
|
||||
kubernetes_ingress_annotations = {}
|
||||
kubernetes_ingress_secret_name = "authentik-outpost-tls"
|
||||
kubernetes_ingress_secret_name = "idm-tls"
|
||||
})
|
||||
|
||||
depends_on = [
|
||||
|
||||
@@ -51,6 +51,9 @@ proxy_applications = {
|
||||
internal_host = "http://secret-reader.k8s-secret.svc:80"
|
||||
internal_host_ssl_validation = false
|
||||
meta_description = ""
|
||||
skip_path_regex = <<-EOT
|
||||
/webhook
|
||||
EOT
|
||||
meta_icon = "https://img.icons8.com/ios-filled/50/password.png"
|
||||
mode = "proxy"
|
||||
outpost = "kubernetes-outpost"
|
||||
@@ -186,6 +189,9 @@ EOT
|
||||
meta_icon = "https://img.icons8.com/color/48/minecraft-grass-cube.png"
|
||||
mode = "proxy"
|
||||
outpost = "kubernetes-outpost"
|
||||
skip_path_regex = <<-EOT
|
||||
/clients
|
||||
EOT
|
||||
}
|
||||
"pasarguard" = {
|
||||
name = "PasarGuard"
|
||||
@@ -196,6 +202,7 @@ EOT
|
||||
internal_host_ssl_validation = false
|
||||
meta_description = ""
|
||||
skip_path_regex = <<-EOT
|
||||
/
|
||||
/sub/
|
||||
/dashboard/
|
||||
/api/
|
||||
|
||||
Reference in New Issue
Block a user