Compare commits
42 Commits
auto-updat...4ea48f0f94

| SHA1 |
|---|
| 4ea48f0f94 |
| 4bfc35d8e2 |
| 46c0fab78a |
| 6dc43149f4 |
| ca1efe6230 |
| e90d2c9dc5 |
| a884c2b969 |
| db92976872 |
| d924ebd3ee |
| 4b30185655 |
| a65b37f000 |
| f394b4f9da |
| 5d12fc854a |
| f415e0711e |
| 14dc69904c |
| f6dc7aa6e3 |
| badd82f9af |
| a5cb49471a |
| 79c23e14b0 |
| 5bc44e45b0 |
| 4a80f2f596 |
| b58461232c |
| be6e601275 |
| 063a4a502b |
| 22382b63a1 |
| 718709115f |
| df78728137 |
| 9671dc3aa8 |
| 609fd9d522 |
| 0e66f4bddd |
| 9f35d520b6 |
| 512e993f7b |
| b6b6e23026 |
| bd997082e9 |
| 0689b6abc8 |
| bbeac646e9 |
| 5d4d620681 |
| 5cf2a32ca0 |
| 35c072471a |
| 9c0458c020 |
| 28d5bcfabd |
| 65b2aaa91d |
@@ -22,12 +22,13 @@ jobs:

       - name: Install Python dependencies
         run: |
-          pip install pyyaml
+          python3 -m venv .venv
+          .venv/bin/pip install pyyaml

       - name: Generate K8s Services Wiki
         run: |
           echo "📋 Starting K8s wiki generation..."
-          python3 .gitea/scripts/generate-k8s-wiki.py k8s/ Kubernetes-Services.md
+          .venv/bin/python .gitea/scripts/generate-k8s-wiki.py k8s/ Kubernetes-Services.md

           if [ -f "Kubernetes-Services.md" ]; then
             echo "✅ Wiki content generated successfully"
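For reference, the updated workflow steps read as follows once the hunk above is applied; this is a minimal sketch that assumes the usual Gitea Actions step layout around them.

```yaml
- name: Install Python dependencies
  run: |
    python3 -m venv .venv          # project-local virtualenv instead of the system Python
    .venv/bin/pip install pyyaml   # pyyaml is installed into .venv only

- name: Generate K8s Services Wiki
  run: |
    echo "📋 Starting K8s wiki generation..."
    .venv/bin/python .gitea/scripts/generate-k8s-wiki.py k8s/ Kubernetes-Services.md
```

Running the script through `.venv/bin/python` keeps the runner's system Python untouched, which is likely the motivation for the change.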
@@ -56,6 +56,7 @@ ArgoCD homelab project
 | **tg-bots** | [](https://ag.hexor.cy/applications/argocd/tg-bots) |
 | **vaultwarden** | [](https://ag.hexor.cy/applications/argocd/vaultwarden) |
 | **vpn** | [](https://ag.hexor.cy/applications/argocd/vpn) |
+| **xandikos** | [](https://ag.hexor.cy/applications/argocd/xandikos) |

 </td>
 </tr>
@@ -77,8 +77,8 @@ spec:
     labels:
       app: gitea-runner
   spec:
-    nodeSelector:
-      kubernetes.io/hostname: home.homenet
+    #nodeSelector:
+    # kubernetes.io/hostname: home.homenet
     volumes:
       - name: docker-sock
         hostPath:
@@ -90,27 +90,30 @@ spec:
     affinity:
       nodeAffinity:
         preferredDuringSchedulingIgnoredDuringExecution:
-          - weight: 3
-            preference:
-              matchExpressions:
-                - key: kubernetes.io/hostname
-                  operator: In
-                  values:
-                    - home.homenet
           - weight: 1
             preference:
               matchExpressions:
                 - key: kubernetes.io/hostname
                   operator: In
                   values:
-                    - master.tail2fe2d.ts.net
+                    - home.homenet
           - weight: 2
             preference:
               matchExpressions:
                 - key: kubernetes.io/hostname
                   operator: In
                   values:
-                    - nas.homenet
+                    - master.tail2fe2d.ts.net
+          - weight: 3
+            preference:
+              matchExpressions:
+                - key: kubernetes.io/hostname
+                  operator: In
+                  values:
+                    - it.tail2fe2d.ts.net
+                    - ch.tail2fe2d.ts.net
+                    - us.tail2fe2d.ts.net

         requiredDuringSchedulingIgnoredDuringExecution:
           nodeSelectorTerms:
             - matchExpressions:
@@ -118,7 +121,9 @@ spec:
                   operator: In
                   values:
                     - home.homenet
-                    - nas.homenet
+                    - it.tail2fe2d.ts.net
+                    - ch.tail2fe2d.ts.net
+                    - us.tail2fe2d.ts.net
                     - master.tail2fe2d.ts.net
     containers:
       - name: gitea-runner
@@ -19,7 +19,7 @@ spec:
         kubernetes.io/os: linux
       containers:
         - name: secret-reader
-          image: ultradesu/k8s-secrets:0.1.1
+          image: ultradesu/k8s-secrets:0.2.1
          imagePullPolicy: Always
          args:
            - "--secrets"
@@ -28,6 +28,7 @@ spec:
            - "k8s-secret"
            - "--port"
            - "3000"
+           - "--webhook"
          ports:
            - containerPort: 3000
              name: http
@@ -192,10 +192,10 @@ spec:
          resources:
            requests:
              memory: "128Mi"
-             cpu: "100m"
+             cpu: "300m"
            limits:
              memory: "512Mi"
-             cpu: "750m"
+             cpu: "1000m"
          volumeMounts:
            - name: shared-data
              mountPath: /shared
@@ -113,7 +113,7 @@ spec:
              mountPath: /scripts
      containers:
        - name: pasarguard-node
-         image: 'pasarguard/node:v0.1.3'
+         image: 'pasarguard/node:v0.1.4'
          imagePullPolicy: Always
          command:
            - /bin/sh
@@ -14,7 +14,7 @@ spec:
        DB_HOST: psql.psql.svc
        DB_USER: mmdl
        DB_NAME: mmdl
-       DB_PORT: 5432
+       DB_PORT: "5432"
        DB_PASS: |-
          {{ .pg_pass }}
        AES_PASSWORD: |-
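The quoting change above matters because the templated values ultimately land in a Kubernetes Secret, whose data values must be strings; an unquoted `5432` is parsed as a YAML integer and can fail validation when the object is created. A minimal illustration (the keys here are only for the example):

```yaml
data:
  DB_PORT_BROKEN: 5432   # YAML integer - rejected as Secret/ConfigMap string data
  DB_PORT: "5432"        # quoted, so it stays a string
```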
@@ -47,3 +47,20 @@ spec:
      server: https://kubernetes.default.svc
  sourceRepos:
    - ssh://git@gt.hexor.cy:30022/ab/homelab.git
+
+---
+apiVersion: argoproj.io/v1alpha1
+kind: AppProject
+metadata:
+  name: desktop
+  namespace: argocd
+spec:
+  clusterResourceWhitelist:
+    - group: '*'
+      kind: '*'
+  description: Hexor Home Lab Desktop Apps
+  destinations:
+    - namespace: '*'
+      server: https://kubernetes.default.svc
+  sourceRepos:
+    - ssh://git@gt.hexor.cy:30022/ab/homelab.git
@@ -2,8 +2,8 @@

 global:
   domain: ag.hexor.cy
-  nodeSelector:
-    kubernetes.io/hostname: master.tail2fe2d.ts.net
+  nodeSelector: &nodeSelector
+    kubernetes.io/hostname: ch.tail2fe2d.ts.net
   logging:
     format: text
     level: info
@@ -55,15 +55,15 @@ configs:

 controller:
   replicas: 1
   nodeSelector:
-    kubernetes.io/hostname: master.tail2fe2d.ts.net
+    <<: *nodeSelector
   # Add resources (requests/limits), PDB etc. if needed

 # Dex OIDC provider
 dex:
   replicas: 1
   nodeSelector:
-    kubernetes.io/hostname: master.tail2fe2d.ts.net
+    <<: *nodeSelector
   enabled: false

 # Standard Redis disabled because Redis HA is enabled
@@ -86,7 +86,7 @@ redis-ha:
 server:
   replicas: 1
   nodeSelector:
-    kubernetes.io/hostname: master.tail2fe2d.ts.net
+    <<: *nodeSelector
   ingress:
     enabled: false

@@ -99,8 +99,11 @@ server:
 # Repository Server
 repoServer:
   replicas: 1
+  livenessProbe:
+    timeoutSeconds: 10
+    periodSeconds: 60
   nodeSelector:
-    kubernetes.io/hostname: master.tail2fe2d.ts.net
+    <<: *nodeSelector
   # Add resources (requests/limits), PDB etc. if needed

 # ApplicationSet Controller
@@ -108,7 +111,7 @@ applicationSet:
   enabled: true # Enabled by default
   replicas: 1
   nodeSelector:
-    kubernetes.io/hostname: master.tail2fe2d.ts.net
+    <<: *nodeSelector
   # Add resources (requests/limits), PDB etc. if needed

 # Notifications Controller
@@ -116,5 +119,5 @@ notifications:
   enabled: true # Enabled by default
   replicas: 1
   nodeSelector:
-    kubernetes.io/hostname: master.tail2fe2d.ts.net
+    <<: *nodeSelector
   # Add notifiers, triggers, templates configurations if needed
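The repeated `<<: *nodeSelector` edits above use a plain YAML anchor plus merge key: the mapping is defined once under `global:` and merged into each component's `nodeSelector`, so moving Argo CD to another node becomes a one-line change. A standalone sketch of the pattern (component names taken from the diff):

```yaml
global:
  nodeSelector: &nodeSelector                      # anchor the mapping once
    kubernetes.io/hostname: ch.tail2fe2d.ts.net

controller:
  nodeSelector:
    <<: *nodeSelector                              # merge key copies the anchored mapping

repoServer:
  nodeSelector:
    <<: *nodeSelector
```

Anchors are resolved by the YAML parser when the values file is loaded, so they only work within a single file.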
@@ -42,10 +42,10 @@ spec:
          resources:
            requests:
              memory: "128Mi"
-             cpu: "100m"
+             cpu: "300m"
            limits:
              memory: "512Mi"
-             cpu: "500m"
+             cpu: "800m"
          env:
            - name: BW_HOST
              valueFrom:
@@ -79,3 +79,83 @@ spec:
        key: 2a9deb39-ef22-433e-a1be-df1555625e22
        property: fields[2].value

+---
+apiVersion: external-secrets.io/v1
+kind: ExternalSecret
+metadata:
+  name: alertmanager-telegram
+spec:
+  target:
+    name: alertmanager-telegram-secret
+    deletionPolicy: Delete
+    template:
+      type: Opaque
+      data:
+        TELEGRAM_BOT_TOKEN: |-
+          {{ .bot_token }}
+        TELEGRAM_CHAT_ID: |-
+          {{ .chat_id }}
+  data:
+    - secretKey: bot_token
+      sourceRef:
+        storeRef:
+          name: vaultwarden-login
+          kind: ClusterSecretStore
+      remoteRef:
+        conversionStrategy: Default
+        decodingStrategy: None
+        metadataPolicy: None
+        key: eca0fb0b-3939-40a8-890a-6294863e5a65
+        property: fields[0].value
+    - secretKey: chat_id
+      sourceRef:
+        storeRef:
+          name: vaultwarden-login
+          kind: ClusterSecretStore
+      remoteRef:
+        conversionStrategy: Default
+        decodingStrategy: None
+        metadataPolicy: None
+        key: eca0fb0b-3939-40a8-890a-6294863e5a65
+        property: fields[1].value
+
+---
+apiVersion: external-secrets.io/v1
+kind: ExternalSecret
+metadata:
+  name: grafana-telegram
+spec:
+  target:
+    name: grafana-telegram
+    deletionPolicy: Delete
+    template:
+      type: Opaque
+      data:
+        bot-token: |-
+          {{ .bot_token }}
+        chat-id: |-
+          {{ .chat_id }}
+  data:
+    - secretKey: bot_token
+      sourceRef:
+        storeRef:
+          name: vaultwarden-login
+          kind: ClusterSecretStore
+      remoteRef:
+        conversionStrategy: Default
+        decodingStrategy: None
+        metadataPolicy: None
+        key: eca0fb0b-3939-40a8-890a-6294863e5a65
+        property: fields[0].value
+    - secretKey: chat_id
+      sourceRef:
+        storeRef:
+          name: vaultwarden-login
+          kind: ClusterSecretStore
+      remoteRef:
+        conversionStrategy: Default
+        decodingStrategy: None
+        metadataPolicy: None
+        key: eca0fb0b-3939-40a8-890a-6294863e5a65
+        property: fields[1].value
+
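Assuming the `vaultwarden-login` ClusterSecretStore returns the referenced fields, the first ExternalSecret above would materialize roughly the following Secret (a sketch with placeholder values; the real object stores them base64-encoded under `data`):

```yaml
apiVersion: v1
kind: Secret
metadata:
  name: alertmanager-telegram-secret
type: Opaque
stringData:
  TELEGRAM_BOT_TOKEN: "<bot_token from fields[0]>"   # placeholder
  TELEGRAM_CHAT_ID: "<chat_id from fields[1]>"       # placeholder
```

The second ExternalSecret does the same for a `grafana-telegram` Secret with `bot-token` and `chat-id` keys.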
k8s/core/prom-stack/grafana-alerting.yaml (new file, 69 lines)
@@ -0,0 +1,69 @@
+rules.yaml: |
+  apiVersion: 1
+  groups:
+    - orgId: 1
+      name: pasarguard_alerts
+      folder: Kubernetes
+      interval: 1m
+      rules:
+        - uid: pasarguard_cpu_throttling
+          title: VPN CPU Throttle
+          condition: A
+          data:
+            - refId: A
+              relativeTimeRange:
+                from: 600
+                to: 0
+              datasourceUid: prometheus
+              model:
+                expr: 'rate(container_cpu_cfs_throttled_periods_total{container="pasarguard-node"}[5m]) > 0.1'
+                refId: A
+          noDataState: NoData
+          execErrState: Alerting
+          for: 5m
+          annotations:
+            description: 'Throttling rate: {{ printf "%.2f" $values.A.Value }}'
+            summary: 'VPN node throttling CPU on {{ $labels.node }}'
+          labels:
+            severity: warning
+
+contactpoints.yaml: |
+  apiVersion: 1
+  contactPoints:
+    - orgId: 1
+      name: telegram
+      receivers:
+        - uid: telegram_default
+          type: telegram
+          settings:
+            bottoken: $TELEGRAM_BOT_TOKEN
+            chatid: $TELEGRAM_CHAT_ID
+            message: |
+              {{ if eq .Status "firing" }}🔥 FIRING{{ else }}✅ RESOLVED{{ end }}
+
+              {{ range .Alerts }}
+              📊 <b>{{ .Labels.alertname }}</b>
+              {{ if .Annotations.summary }}{{ .Annotations.summary }}{{ end }}
+
+              🎯 <b>Details:</b>
+              • Pod: <code>{{ .Labels.pod }}</code>
+              • Node: <code>{{ .Labels.node }}</code>
+              • Namespace: <code>{{ .Labels.namespace }}</code>
+              {{ if .Annotations.description }}• {{ .Annotations.description }}{{ end }}
+
+              🔗 <a href="{{ .GeneratorURL }}">View in Grafana</a>
+              {{ end }}
+            parse_mode: HTML
+            disableResolveMessage: false
+
+policies.yaml: |
+  apiVersion: 1
+  policies:
+    - orgId: 1
+      receiver: telegram
+      group_by:
+        - grafana_folder
+        - alertname
+      group_wait: 10s
+      group_interval: 5m
+      repeat_interval: 4h
@@ -38,6 +38,10 @@ datasources:
    url: http://prometheus-kube-prometheus-prometheus.prometheus.svc:9090
    access: proxy
    isDefault: true
+  - name: Loki
+    type: loki
+    url: http://loki-gateway.prometheus.svc:80
+    access: proxy

 ingress:
   enabled: true
@@ -52,3 +56,19 @@ ingress:
   hosts:
     - '*.hexor.cy'

+extraConfigmapMounts:
+  - name: grafana-alerting
+    mountPath: /etc/grafana/provisioning/alerting
+    configMap: grafana-alerting
+    readOnly: true
+
+envValueFrom:
+  TELEGRAM_BOT_TOKEN:
+    secretKeyRef:
+      name: grafana-telegram
+      key: bot-token
+  TELEGRAM_CHAT_ID:
+    secretKeyRef:
+      name: grafana-telegram
+      key: chat-id
+
@@ -6,6 +6,12 @@ resources:
 - persistentVolume.yaml
 - external-secrets.yaml

+configMapGenerator:
+  - name: grafana-alerting
+    namespace: prometheus
+    files:
+      - grafana-alerting.yaml
+
 helmCharts:
 - name: kube-prometheus-stack
   repo: https://prometheus-community.github.io/helm-charts
@@ -23,3 +29,18 @@ helmCharts:
   valuesFile: grafana-values.yaml
   includeCRDs: true

+- name: loki
+  repo: https://grafana.github.io/helm-charts
+  version: 6.29.0
+  releaseName: loki
+  namespace: prometheus
+  valuesFile: loki-values.yaml
+  includeCRDs: true
+
+- name: promtail
+  repo: https://grafana.github.io/helm-charts
+  version: 6.16.6
+  releaseName: promtail
+  namespace: prometheus
+  valuesFile: promtail-values.yaml
+
k8s/core/prom-stack/loki-values.yaml (new file, 75 lines)
@@ -0,0 +1,75 @@
+# Loki SingleBinary mode - optimal for homelab
+deploymentMode: SingleBinary
+
+loki:
+  auth_enabled: false
+  commonConfig:
+    replication_factor: 1
+    path_prefix: /var/loki
+  schemaConfig:
+    configs:
+      - from: 2024-01-01
+        store: tsdb
+        object_store: filesystem
+        schema: v13
+        index:
+          prefix: index_
+          period: 24h
+  storage:
+    type: filesystem
+    filesystem:
+      chunks_directory: /var/loki/chunks
+      rules_directory: /var/loki/rules
+  limits_config:
+    reject_old_samples: false
+    ingestion_rate_mb: 16
+    ingestion_burst_size_mb: 32
+    max_query_parallelism: 32
+    volume_enabled: true
+
+singleBinary:
+  replicas: 1
+  nodeSelector:
+    kubernetes.io/hostname: master.tail2fe2d.ts.net
+  persistence:
+    enabled: true
+    size: 50Gi
+    storageClass: ""
+
+# Disable distributed mode components
+read:
+  replicas: 0
+write:
+  replicas: 0
+backend:
+  replicas: 0
+
+# Disable memcached (not needed for SingleBinary)
+chunksCache:
+  enabled: false
+resultsCache:
+  enabled: false
+
+# Gateway for Loki access
+gateway:
+  enabled: true
+  replicas: 1
+  service:
+    type: ClusterIP
+
+# Disable tests and canary
+test:
+  enabled: false
+lokiCanary:
+  enabled: false
+
+# Monitoring
+monitoring:
+  dashboards:
+    enabled: false
+  rules:
+    enabled: false
+  serviceMonitor:
+    enabled: false
+  selfMonitoring:
+    enabled: false
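Everything in this change talks to Loki through the chart's `loki-gateway` Service. Restating the two sides from elsewhere in this diff for clarity (the `promtail:`/`grafana:` wrapper keys here are only illustrative, not real values files):

```yaml
promtail:
  config:
    clients:
      - url: http://loki-gateway.prometheus.svc:80/loki/api/v1/push   # push side (promtail-values.yaml)

grafana:
  datasources:
    - name: Loki
      type: loki
      url: http://loki-gateway.prometheus.svc:80                      # query side (grafana-values.yaml)
      access: proxy
```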
@@ -1,5 +1,35 @@
 grafana:
   enabled: false

+alertmanager:
+  config:
+    global:
+      telegram_api_url: "https://api.telegram.org"
+    route:
+      group_by: ['alertname', 'cluster', 'service']
+      group_wait: 10s
+      group_interval: 10s
+      repeat_interval: 12h
+      receiver: 'telegram'
+    receivers:
+      - name: 'telegram'
+        telegram_configs:
+          - bot_token: '${TELEGRAM_BOT_TOKEN}'
+            chat_id: ${TELEGRAM_CHAT_ID}
+            parse_mode: 'HTML'
+            message: |
+              {{ range .Alerts }}
+              <b>{{ .Labels.alertname }}</b>
+              {{ if .Labels.severity }}<b>Severity:</b> {{ .Labels.severity }}{{ end }}
+              <b>Status:</b> {{ .Status }}
+              {{ if .Annotations.summary }}<b>Summary:</b> {{ .Annotations.summary }}{{ end }}
+              {{ if .Annotations.description }}<b>Description:</b> {{ .Annotations.description }}{{ end }}
+              {{ end }}
+
+  alertmanagerSpec:
+    secrets:
+      - alertmanager-telegram-secret
+
 prometheus:
   prometheusSpec:
     enableRemoteWriteReceiver: true
k8s/core/prom-stack/promtail-values.yaml (new file, 37 lines)
@@ -0,0 +1,37 @@
+# Promtail - log collection agent for all cluster pods
+config:
+  clients:
+    - url: http://loki-gateway.prometheus.svc:80/loki/api/v1/push
+
+# DaemonSet - runs on every node
+daemonset:
+  enabled: true
+
+# Tolerations for master/control-plane nodes
+tolerations:
+  - key: node-role.kubernetes.io/master
+    operator: Exists
+    effect: NoSchedule
+  - key: node-role.kubernetes.io/control-plane
+    operator: Exists
+    effect: NoSchedule
+
+# Init container to increase inotify limits
+initContainer:
+  - name: init-inotify
+    image: docker.io/busybox:1.36
+    imagePullPolicy: IfNotPresent
+    command:
+      - sh
+      - -c
+      - sysctl -w fs.inotify.max_user_instances=512
+    securityContext:
+      privileged: true
+
+resources:
+  requests:
+    cpu: 50m
+    memory: 64Mi
+  limits:
+    cpu: 200m
+    memory: 128Mi
k8s/desktop/jellyfin/app.yaml (new file, 21 lines)
@@ -0,0 +1,21 @@
+apiVersion: argoproj.io/v1alpha1
+kind: Application
+metadata:
+  name: jellyfin-uk
+  namespace: argocd
+spec:
+  project: apps
+  destination:
+    namespace: jellyfin-uk
+    server: https://kubernetes.default.svc
+  source:
+    repoURL: ssh://git@gt.hexor.cy:30022/ab/homelab.git
+    targetRevision: HEAD
+    path: k8s/desktop/jellyfin
+  syncPolicy:
+    automated:
+      selfHeal: true
+      prune: true
+    syncOptions:
+      - CreateNamespace=true
+
k8s/desktop/jellyfin/kustomization.yaml (new file, 16 lines)
@@ -0,0 +1,16 @@
+apiVersion: kustomize.config.k8s.io/v1beta1
+kind: Kustomization
+
+resources:
+  - app.yaml
+  - qbittorent.yaml
+
+helmCharts:
+  - name: jellyfin
+    repo: https://utkuozdemir.org/helm-charts
+    version: 2.0.0
+    releaseName: jellyfin
+    namespace: jellyfin
+    valuesFile: values.yaml
+    includeCRDs: true
+
k8s/desktop/jellyfin/qbittorent.yaml (new file, 123 lines)
@@ -0,0 +1,123 @@
+---
+apiVersion: external-secrets.io/v1
+kind: ExternalSecret
+metadata:
+  name: vpn-creds
+spec:
+  target:
+    name: vpn-creds
+    deletionPolicy: Delete
+    template:
+      type: Opaque
+      data:
+        ss_link: |-
+          {{ .ss_link }}
+  data:
+    - secretKey: ss_link
+      sourceRef:
+        storeRef:
+          name: vaultwarden-login
+          kind: ClusterSecretStore
+      remoteRef:
+        key: cfee6f62-fb06-4a4c-b6d8-92da4908c65a
+        property: fields[0].value
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: qbittorrent
+  labels:
+    app: qbittorrent
+  annotations:
+    reloader.stakater.com/auto: "true"
+spec:
+  selector:
+    matchLabels:
+      app: qbittorrent
+  replicas: 1
+  strategy:
+    type: RollingUpdate
+    rollingUpdate:
+      maxSurge: 1
+      maxUnavailable: 0
+  template:
+    metadata:
+      labels:
+        app: qbittorrent
+    spec:
+      nodeSelector:
+        kubernetes.io/hostname: uk-desktop.tail2fe2d.ts.net
+      tolerations:
+        - key: workload
+          operator: Equal
+          value: desktop
+          effect: NoSchedule
+      volumes:
+        - name: config
+          hostPath:
+            path: /k8s/qbt-config
+            type: DirectoryOrCreate
+        - name: media
+          hostPath:
+            path: /k8s/media/downloads
+            type: DirectoryOrCreate
+      containers:
+        - name: qbittorrent
+          image: 'linuxserver/qbittorrent:latest'
+          ports:
+            - name: http
+              containerPort: 8080
+              protocol: TCP
+          volumeMounts:
+            - name: config
+              mountPath: /config
+            - name: media
+              mountPath: /downloads
+        - name: shadowsocks-proxy
+          image: teddysun/shadowsocks-rust:latest
+          env:
+            - name: SS_LINK
+              valueFrom:
+                secretKeyRef:
+                  name: vpn-creds
+                  key: ss_link
+          command: ["/bin/bash", "-c", "rm /etc/shadowsocks-rust/config.json && sslocal --server-url $SS_LINK --local-addr 127.0.0.1:8081 -U --protocol http"]
+          resources:
+            requests:
+              memory: "64Mi"
+              cpu: "300m"
+            limits:
+              memory: "128Mi"
+              cpu: "300m"
+---
+apiVersion: v1
+kind: Service
+metadata:
+  name: qbittorrent
+spec:
+  selector:
+    app: qbittorrent
+  ports:
+    - protocol: TCP
+      port: 80
+      targetPort: 8080
+
+---
+apiVersion: networking.k8s.io/v1
+kind: Ingress
+metadata:
+  name: jf-local-ingress
+  annotations:
+    ingressClassName: traefik
+spec:
+  rules:
+    - host: tr.uk
+      http:
+        paths:
+          - path: /
+            pathType: Prefix
+            backend:
+              service:
+                name: qbittorrent
+                port:
+                  number: 80
k8s/desktop/jellyfin/values.yaml (new file, 41 lines)
@@ -0,0 +1,41 @@
+image:
+  tag: 10.11.4
+resources:
+  requests:
+    memory: "2Gi"
+    cpu: "1000m"
+  limits:
+    memory: "8Gi"
+    cpu: "6000m"
+nodeSelector:
+  kubernetes.io/hostname: uk-desktop.tail2fe2d.ts.net
+tolerations:
+  - key: workload
+    operator: Equal
+    value: desktop
+    effect: NoSchedule
+persistence:
+  config:
+    enabled: true
+    isPvc: false
+    customVolume:
+      hostPath:
+        path: /k8s/jellyfin
+        type: DirectoryOrCreate
+  data:
+    enabled: true
+    isPvc: false
+    customVolume:
+      hostPath:
+        path: /k8s/media/downloads
+        type: DirectoryOrCreate
+
+ingress:
+  enabled: true
+  className: traefik
+  hosts:
+    - host: jf.uk
+      paths:
+        - path: /
+          pathType: Prefix
+
k8s/desktop/khm/app.yaml (new file, 18 lines)
@@ -0,0 +1,18 @@
+apiVersion: argoproj.io/v1alpha1
+kind: Application
+metadata:
+  name: khm-client
+  namespace: argocd
+spec:
+  project: desktop
+  destination:
+    namespace: khm
+    server: https://kubernetes.default.svc
+  source:
+    repoURL: ssh://git@gt.hexor.cy:30022/ab/homelab.git
+    targetRevision: HEAD
+    path: k8s/desktop/khm
+  syncPolicy:
+    automated:
+      selfHeal: true
+      prune: true
k8s/desktop/khm/external-secrets.yaml (new file, 33 lines)
@@ -0,0 +1,33 @@
+---
+apiVersion: external-secrets.io/v1
+kind: ExternalSecret
+metadata:
+  name: khm-client-creds
+spec:
+  target:
+    name: khm-client-creds
+    deletionPolicy: Delete
+    template:
+      type: Opaque
+      data:
+        USERNAME: |-
+          {{ .username }}
+        PASSWORD: |-
+          {{ .password }}
+  data:
+    - secretKey: username
+      sourceRef:
+        storeRef:
+          name: vaultwarden-login
+          kind: ClusterSecretStore
+      remoteRef:
+        key: 19c06480-0814-4d1f-aa80-710105989188
+        property: login.username
+    - secretKey: password
+      sourceRef:
+        storeRef:
+          name: vaultwarden-login
+          kind: ClusterSecretStore
+      remoteRef:
+        key: 19c06480-0814-4d1f-aa80-710105989188
+        property: login.password
k8s/desktop/khm/khm-client-cronjob.yaml (new file, 69 lines)
@@ -0,0 +1,69 @@
+---
+apiVersion: batch/v1
+kind: CronJob
+metadata:
+  name: khm-client
+  labels:
+    app: khm-client
+spec:
+  schedule: "15 * * * *"
+  concurrencyPolicy: Forbid
+  successfulJobsHistoryLimit: 3
+  failedJobsHistoryLimit: 3
+  jobTemplate:
+    spec:
+      template:
+        metadata:
+          labels:
+            app: khm-client
+        spec:
+          restartPolicy: OnFailure
+          hostNetwork: true
+          nodeSelector:
+            node-role.kubernetes.io/desktop: ""
+          tolerations:
+            - key: workload
+              operator: Equal
+              value: desktop
+              effect: NoSchedule
+          containers:
+            - name: khm-client
+              image: 'ultradesu/khm:latest'
+              imagePullPolicy: Always
+              securityContext:
+                privileged: false
+              resources:
+                requests:
+                  memory: "64Mi"
+                  cpu: "50m"
+                limits:
+                  memory: "256Mi"
+                  cpu: "200m"
+              command:
+                - /bin/sh
+                - -c
+                - |
+                  /usr/local/bin/khm \
+                    --known-hosts /host-ssh/known_hosts \
+                    --host https://khm.hexor.cy \
+                    --flow=private \
+                    --basic-auth="${USERNAME}:${PASSWORD}" \
+                    --in-place
+              env:
+                - name: USERNAME
+                  valueFrom:
+                    secretKeyRef:
+                      name: khm-client-creds
+                      key: USERNAME
+                - name: PASSWORD
+                  valueFrom:
+                    secretKeyRef:
+                      name: khm-client-creds
+                      key: PASSWORD
+              volumeMounts:
+                - name: known-hosts
+                  mountPath: /host-ssh/known_hosts
+          volumes:
+            - name: known-hosts
+              hostPath:
+                path: /home/ab/.ssh/known_hosts
k8s/desktop/khm/kustomization.yaml (new file, 6 lines)
@@ -0,0 +1,6 @@
+apiVersion: kustomize.config.k8s.io/v1beta1
+kind: Kustomization
+
+resources:
+  - external-secrets.yaml
+  - khm-client-cronjob.yaml
@@ -1,4 +1,3 @@
-
 data "authentik_flow" "default_authorization_flow" {
   slug = var.default_authorization_flow
 }
@@ -299,7 +298,7 @@ resource "authentik_outpost" "outposts" {
     kubernetes_ingress_class_name = null
     kubernetes_disabled_components = []
     kubernetes_ingress_annotations = {}
-    kubernetes_ingress_secret_name = "authentik-outpost-tls"
+    kubernetes_ingress_secret_name = "idm-tls"
   })

   depends_on = [
@@ -51,6 +51,9 @@ proxy_applications = {
     internal_host = "http://secret-reader.k8s-secret.svc:80"
     internal_host_ssl_validation = false
     meta_description = ""
+    skip_path_regex = <<-EOT
+      /webhook
+    EOT
     meta_icon = "https://img.icons8.com/ios-filled/50/password.png"
     mode = "proxy"
     outpost = "kubernetes-outpost"
@@ -196,6 +199,7 @@ EOT
     internal_host_ssl_validation = false
     meta_description = ""
     skip_path_regex = <<-EOT
+      /
       /sub/
       /dashboard/
       /api/