Compare commits

..

13 Commits

Author SHA1 Message Date
Gitea Actions Bot
1ace105757 Auto-update README with current k8s applications
All checks were successful
Terraform / Terraform (pull_request) Successful in 1m14s
Generated by CI/CD workflow on 2025-11-24 11:19:59

This PR updates the README.md file with the current list of applications found in the k8s/ directory structure.
2025-11-24 11:19:59 +00:00
Ultradesu
ce4172b435 Increase CPU limits
Some checks are pending
Auto-update README / Generate README and Create MR (push) Waiting to run
Update Kubernetes Services Wiki / Generate and Update K8s Wiki (push) Successful in 12s
Check with kubeconform / lint (push) Successful in 23s
2025-11-24 13:19:04 +02:00
Ultradesu
c841f95bd2 Fixed iperf3 monitor
All checks were successful
Update Kubernetes Services Wiki / Generate and Update K8s Wiki (push) Successful in 12s
Check with kubeconform / lint (push) Successful in 12s
Auto-update README / Generate README and Create MR (push) Successful in 11s
2025-11-19 00:02:10 +02:00
Ultradesu
fb651bc6fe Fixed iperf3 monitor
All checks were successful
Update Kubernetes Services Wiki / Generate and Update K8s Wiki (push) Successful in 13s
Check with kubeconform / lint (push) Successful in 13s
Auto-update README / Generate README and Create MR (push) Successful in 10s
2025-11-18 23:57:43 +02:00
Ultradesu
04330aa474 Fixed iperf3 monitor
All checks were successful
Update Kubernetes Services Wiki / Generate and Update K8s Wiki (push) Successful in 13s
Check with kubeconform / lint (push) Successful in 15s
Auto-update README / Generate README and Create MR (push) Successful in 14s
2025-11-18 22:30:15 +02:00
Ultradesu
57d517af77 Fixed iperf3 monitor
All checks were successful
Update Kubernetes Services Wiki / Generate and Update K8s Wiki (push) Successful in 58s
Check with kubeconform / lint (push) Successful in 1m20s
Auto-update README / Generate README and Create MR (push) Successful in 23s
2025-11-18 22:21:17 +02:00
Ultradesu
1f7d9e41c8 Fixed iperf3 monitor
All checks were successful
Update Kubernetes Services Wiki / Generate and Update K8s Wiki (push) Successful in 30s
Check with kubeconform / lint (push) Successful in 1m8s
Auto-update README / Generate README and Create MR (push) Successful in 37s
2025-11-18 22:16:11 +02:00
Ultradesu
39a27c596f Fixed iperf3
All checks were successful
Update Kubernetes Services Wiki / Generate and Update K8s Wiki (push) Successful in 12s
Check with kubeconform / lint (push) Successful in 20s
Auto-update README / Generate README and Create MR (push) Successful in 11s
2025-11-18 22:13:12 +02:00
Ultradesu
353bb877be Fixed iperf3
All checks were successful
Update Kubernetes Services Wiki / Generate and Update K8s Wiki (push) Successful in 12s
Check with kubeconform / lint (push) Successful in 14s
Auto-update README / Generate README and Create MR (push) Successful in 15s
2025-11-18 21:45:26 +02:00
Ultradesu
e523bb8bea Fixed iperf3
All checks were successful
Update Kubernetes Services Wiki / Generate and Update K8s Wiki (push) Successful in 12s
Check with kubeconform / lint (push) Successful in 14s
Auto-update README / Generate README and Create MR (push) Successful in 12s
2025-11-18 21:44:00 +02:00
Ultradesu
b433373725 Fixed iperf3
All checks were successful
Update Kubernetes Services Wiki / Generate and Update K8s Wiki (push) Successful in 13s
Check with kubeconform / lint (push) Successful in 13s
Auto-update README / Generate README and Create MR (push) Successful in 12s
2025-11-18 21:40:53 +02:00
Ultradesu
3026e53746 Added iperf3-server
All checks were successful
Update Kubernetes Services Wiki / Generate and Update K8s Wiki (push) Successful in 12s
Check with kubeconform / lint (push) Successful in 27s
Auto-update README / Generate README and Create MR (push) Successful in 11s
2025-11-18 21:37:09 +02:00
Ultradesu
63669c69ff Added iperf3-server
All checks were successful
Update Kubernetes Services Wiki / Generate and Update K8s Wiki (push) Successful in 12s
Check with kubeconform / lint (push) Successful in 41s
Auto-update README / Generate README and Create MR (push) Successful in 15s
2025-11-18 21:33:34 +02:00
24 changed files with 271 additions and 41 deletions

View File

@@ -36,7 +36,7 @@ spec:
cpu: "200m"
limits:
memory: "2Gi"
cpu: "1000m"
cpu: "1500m"
env:
- name: GITEA__service__REGISTER_MANUAL_CONFIRM
value: "true"
@@ -129,7 +129,7 @@ spec:
memory: "256Mi"
ephemeral-storage: "1Gi" # reserve ephemeral storage
limits:
cpu: "2000m"
cpu: "3000m"
memory: "4Gi"
ephemeral-storage: "28Gi" # hard cap for /data usage
volumeMounts:

View File

@@ -30,7 +30,7 @@ spec:
cpu: "100m"
memory: "256Mi"
limits:
cpu: "2000m"
cpu: "3000m"
memory: "1Gi"
volumeMounts:
- name: data

View File

@@ -30,7 +30,7 @@ spec:
cpu: "50m"
limits:
memory: "128Mi"
cpu: "200m"
cpu: "300m"
command:
- git
- clone
@@ -49,7 +49,7 @@ spec:
cpu: "50m"
limits:
memory: "256Mi"
cpu: "200m"
cpu: "300m"
volumeMounts:
- name: hexound-repo
mountPath: /var/www/html

View File

@@ -23,7 +23,7 @@ spec:
cpu: "500m"
limits:
memory: "4Gi"
cpu: "2000m"
cpu: "3000m"
ports:
- containerPort: 2283
env:
@@ -160,7 +160,7 @@ spec:
cpu: "1000m"
limits:
memory: "8Gi"
cpu: "4000m"
cpu: "6000m"
env:
- name: TZ
value: Asia/Nicosia
@@ -201,7 +201,7 @@ spec:
cpu: "100m"
limits:
memory: "512Mi"
cpu: "500m"
cpu: "750m"
readinessProbe:
exec:
command: ["redis-cli", "ping"]

View File

@@ -11,8 +11,61 @@ spec:
labels:
app: iperf3-server
spec:
hostname: iperf3-$(NODE_NAME)
serviceAccountName: iperf3-server
subdomain: iperf3
initContainers:
- name: create-service
image: bitnami/kubectl:latest
env:
- name: NODE_NAME
valueFrom:
fieldRef:
fieldPath: spec.nodeName
- name: POD_IP
valueFrom:
fieldRef:
fieldPath: status.podIP
command:
- /bin/bash
- -c
- |
# Clean node name for service name
NODE_CLEAN=$(echo "$NODE_NAME" | cut -d'.' -f1 | tr '[:upper:]' '[:lower:]' | tr '_' '-')
SERVICE_NAME="iperf3-${NODE_CLEAN}"
# Create service for this pod
kubectl apply -f - <<EOF
apiVersion: v1
kind: Service
metadata:
name: ${SERVICE_NAME}
namespace: iperf3
labels:
app: iperf3-node-service
target-node: "${NODE_NAME}"
spec:
type: ClusterIP
ports:
- name: iperf3
port: 5201
protocol: TCP
---
apiVersion: v1
kind: Endpoints
metadata:
name: ${SERVICE_NAME}
namespace: iperf3
labels:
app: iperf3-node-service
target-node: "${NODE_NAME}"
subsets:
- addresses:
- ip: ${POD_IP}
ports:
- name: iperf3
port: 5201
protocol: TCP
EOF
containers:
- name: iperf3-server
image: networkstatic/iperf3:latest
@@ -31,7 +84,7 @@ spec:
cpu: "100m"
limits:
memory: "256Mi"
cpu: "500m"
cpu: "750m"
tolerations:
- effect: NoSchedule
operator: Exists

View File

@@ -14,9 +14,61 @@ spec:
labels:
app: iperf3-exporter
spec:
serviceAccountName: iperf3-server
initContainers:
- name: create-exporter-service
image: bitnami/kubectl:latest
env:
- name: NODE_NAME
valueFrom:
fieldRef:
fieldPath: spec.nodeName
- name: POD_IP
valueFrom:
fieldRef:
fieldPath: status.podIP
command:
- /bin/bash
- -c
- |
NODE_CLEAN=$(echo "$NODE_NAME" | cut -d'.' -f1 | tr '[:upper:]' '[:lower:]' | tr '_' '-')
SERVICE_NAME="iperf3-exporter-${NODE_CLEAN}"
kubectl apply -f - <<EOF
apiVersion: v1
kind: Service
metadata:
name: ${SERVICE_NAME}
namespace: iperf3
labels:
app: iperf3-exporter-service
target-node: "${NODE_NAME}"
spec:
type: ClusterIP
ports:
- name: metrics
port: 9579
protocol: TCP
---
apiVersion: v1
kind: Endpoints
metadata:
name: ${SERVICE_NAME}
namespace: iperf3
labels:
app: iperf3-exporter-service
target-node: "${NODE_NAME}"
subsets:
- addresses:
- ip: ${POD_IP}
ports:
- name: metrics
port: 9579
protocol: TCP
EOF
containers:
- name: iperf3-exporter
image: edgardlt/iperf3_exporter:latest
image: ghcr.io/edgard/iperf3_exporter:1.2.2
ports:
- containerPort: 9579
name: metrics
@@ -27,7 +79,7 @@ spec:
cpu: "50m"
limits:
memory: "128Mi"
cpu: "200m"
cpu: "300m"
env:
- name: NODE_NAME
valueFrom:

View File

@@ -2,6 +2,7 @@ apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
resources:
- rbac.yaml
- daemonset.yaml
- service-headless.yaml
- iperf3-exporter-daemonset.yaml

k8s/apps/iperf3/rbac.yaml — new file, 36 lines
View File
View File

@@ -0,0 +1,36 @@
---
# ServiceAccount used by the iperf3 server and exporter pods; the
# initContainers run `kubectl apply` under this identity to create
# per-node Services/Endpoints.
apiVersion: v1
kind: ServiceAccount
metadata:
  name: iperf3-server
  namespace: iperf3
  labels:
    app: iperf3-server
---
# Namespaced Role granting just enough to manage the per-node
# Service/Endpoints objects the daemonset pods create at startup.
apiVersion: rbac.authorization.k8s.io/v1
kind: Role
metadata:
  name: iperf3-service-manager
  namespace: iperf3
  labels:
    app: iperf3-server
rules:
  - apiGroups: [""]
    resources: ["services", "endpoints"]
    verbs: ["get", "list", "create", "update", "patch", "delete"]
---
# Bind the Role to the ServiceAccount within the iperf3 namespace.
apiVersion: rbac.authorization.k8s.io/v1
kind: RoleBinding
metadata:
  name: iperf3-service-manager
  namespace: iperf3
  labels:
    app: iperf3-server
roleRef:
  apiGroup: rbac.authorization.k8s.io
  kind: Role
  name: iperf3-service-manager
subjects:
  - kind: ServiceAccount
    name: iperf3-server
    namespace: iperf3

View File

@@ -3,7 +3,6 @@ apiVersion: monitoring.coreos.com/v1
kind: ServiceMonitor
metadata:
name: iperf3-exporter
namespace: default
labels:
app: iperf3-exporter
release: prometheus
@@ -17,19 +16,105 @@ spec:
interval: 5m
scrapeTimeout: 30s
params:
duration:
- "10"
streams:
- "4"
target: ['iperf3-ch.iperf3.svc.cluster.local:5201']
period: ['10s']
streams: ['4']
relabelings:
- sourceLabels: [__address__]
targetLabel: __param_target
regex: (.+):9579
replacement: iperf3-${1}.iperf3.default.svc.cluster.local:5201
- sourceLabels: [__param_target]
targetLabel: instance
- targetLabel: __address__
replacement: iperf3-exporter.default.svc.cluster.local:9579
replacement: iperf3-exporter-ch.iperf3.svc:9579
- port: metrics
path: /probe
interval: 5m
scrapeTimeout: 30s
params:
target: ['iperf3-us.iperf3.svc.cluster.local:5201']
period: ['10s']
streams: ['4']
relabelings:
- sourceLabels: [__param_target]
targetLabel: instance
- targetLabel: __address__
replacement: iperf3-exporter-us.iperf3.svc:9579
- port: metrics
path: /probe
interval: 5m
scrapeTimeout: 30s
params:
target: ['iperf3-iris.iperf3.svc.cluster.local:5201']
period: ['10s']
streams: ['4']
relabelings:
- sourceLabels: [__param_target]
targetLabel: instance
- targetLabel: __address__
replacement: iperf3-exporter-iris.iperf3.svc:9579
- port: metrics
path: /probe
interval: 5m
scrapeTimeout: 30s
params:
target: ['iperf3-home.iperf3.svc.cluster.local:5201']
period: ['10s']
streams: ['4']
relabelings:
- sourceLabels: [__param_target]
targetLabel: instance
- targetLabel: __address__
replacement: iperf3-exporter-home.iperf3.svc:9579
- port: metrics
path: /probe
interval: 5m
scrapeTimeout: 30s
params:
target: ['iperf3-master.iperf3.svc.cluster.local:5201']
period: ['10s']
streams: ['4']
relabelings:
- sourceLabels: [__param_target]
targetLabel: instance
- targetLabel: __address__
replacement: iperf3-exporter-master.iperf3.svc:9579
- port: metrics
path: /probe
interval: 5m
scrapeTimeout: 30s
params:
target: ['iperf3-it.iperf3.svc.cluster.local:5201']
period: ['10s']
streams: ['4']
relabelings:
- sourceLabels: [__param_target]
targetLabel: instance
- targetLabel: __address__
replacement: iperf3-exporter-it.iperf3.svc:9579
- port: metrics
path: /probe
interval: 5m
scrapeTimeout: 30s
params:
target: ['iperf3-nas.iperf3.svc.cluster.local:5201']
period: ['10s']
streams: ['4']
relabelings:
- sourceLabels: [__param_target]
targetLabel: instance
- targetLabel: __address__
replacement: iperf3-exporter-nas.iperf3.svc:9579
- port: metrics
path: /probe
interval: 5m
scrapeTimeout: 30s
params:
target: ['iperf3-spb.iperf3.svc.cluster.local:5201']
period: ['10s']
streams: ['4']
relabelings:
- sourceLabels: [__param_target]
targetLabel: instance
- targetLabel: __address__
replacement: iperf3-exporter-spb.iperf3.svc:9579
metricRelabelings:
- sourceLabels: [__name__]
regex: iperf3_(.+)

View File

@@ -80,7 +80,10 @@ spec:
resources:
requests:
memory: "64Mi"
cpu: "200m"
cpu: "300m"
limits:
memory: "128Mi"
cpu: "300m"
---
apiVersion: v1
kind: Service

View File

@@ -6,7 +6,7 @@ resources:
cpu: "1000m"
limits:
memory: "8Gi"
cpu: "4000m"
cpu: "6000m"
nodeSelector:
kubernetes.io/hostname: master.tail2fe2d.ts.net
persistence:

View File

@@ -40,7 +40,7 @@ spec:
cpu: "50m"
limits:
memory: "128Mi"
cpu: "100m"
cpu: "150m"
livenessProbe:
httpGet:
path: /health

View File

@@ -29,7 +29,7 @@ spec:
cpu: "100m"
limits:
memory: "1Gi"
cpu: "500m"
cpu: "750m"
command:
- /bin/sh
- -c

View File

@@ -13,7 +13,7 @@ resources:
cpu: "200m"
limits:
memory: "2Gi"
cpu: "1000m"
cpu: "1500m"
service:
type: ClusterIP

View File

@@ -6,7 +6,7 @@ resources:
cpu: "500m"
limits:
memory: "4Gi"
cpu: "2000m"
cpu: "3000m"
initContainers:
install-tesseract-langs:
image: ghcr.io/paperless-ngx/paperless-ngx:2.18.2
@@ -16,7 +16,7 @@ initContainers:
cpu: "100m"
limits:
memory: "1Gi"
cpu: "500m"
cpu: "750m"
command: ["/bin/sh", "-c"]
args:
- apt-get update && apt-get install -y --reinstall tesseract-ocr-rus tesseract-ocr-jpn tesseract-ocr-chi-sim tesseract-ocr-eng tesseract-ocr-ell && cp -v -r /usr/share/tesseract-ocr/5/tessdata/* /custom-tessdata/

View File

@@ -13,7 +13,7 @@ resources:
cpu: "100m"
limits:
memory: "1Gi"
cpu: "500m"
cpu: "750m"
service:
type: ClusterIP

View File

@@ -144,7 +144,7 @@ spec:
cpu: "50m"
limits:
memory: "128Mi"
cpu: "100m"
cpu: "150m"
volumeMounts:
- name: shared-data
mountPath: /shared
@@ -204,7 +204,7 @@ spec:
cpu: "100m"
limits:
memory: "512Mi"
cpu: "500m"
cpu: "750m"
volumeMounts:
- name: shared-data
mountPath: /shared

View File

@@ -27,7 +27,7 @@ spec:
cpu: "100m"
limits:
memory: "512Mi"
cpu: "500m"
cpu: "750m"
command: ["hbbs"]
args:
- "--relay-servers"
@@ -98,7 +98,7 @@ spec:
cpu: "100m"
limits:
memory: "512Mi"
cpu: "500m"
cpu: "750m"
command: ["hbbr"]
args:
- "--port"

View File

@@ -4,7 +4,7 @@ resources:
cpu: "100m"
limits:
memory: "1Gi"
cpu: "500m"
cpu: "750m"
nodeSelector:
kubernetes.io/hostname: master.tail2fe2d.ts.net

View File

@@ -6,7 +6,7 @@ resources:
cpu: "100m"
limits:
memory: "1Gi"
cpu: "500m"
cpu: "750m"
nodeSelector:
kubernetes.io/hostname: master.tail2fe2d.ts.net

View File

@@ -6,7 +6,7 @@ resources:
cpu: "200m"
limits:
memory: "2Gi"
cpu: "1000m"
cpu: "1500m"
nodeSelector:
kubernetes.io/hostname: master.tail2fe2d.ts.net

View File

@@ -33,7 +33,7 @@ resources:
cpu: "200m"
limits:
memory: "2Gi"
cpu: "1000m"
cpu: "1500m"
probes:
liveness:

View File

@@ -37,7 +37,7 @@ spec:
cpu: "100m"
limits:
memory: "1Gi"
cpu: "500m"
cpu: "750m"
env:
- name: DOMAIN
value: https://vw.hexor.cy

View File

@@ -174,7 +174,7 @@ spec:
resources:
limits:
memory: "512Mi"
cpu: "500m"
cpu: "750m"
requests:
memory: "256Mi"
cpu: "250m"