Compare commits
1 Commits
b71f54f714
...
auto-updat
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
3ad3f6db11 |
@@ -30,29 +30,21 @@ jobs:
|
||||
cli_config_credentials_token: ${{ secrets.TF_API_TOKEN }}
|
||||
|
||||
- name: Terraform Init
|
||||
env:
|
||||
TF_VAR_authentik_token: ${{ secrets.AUTHENTIK_TOKEN }}
|
||||
run: terraform init
|
||||
working-directory: ./terraform/authentik
|
||||
|
||||
- name: Terraform Format
|
||||
env:
|
||||
TF_VAR_authentik_token: ${{ secrets.AUTHENTIK_TOKEN }}
|
||||
run: terraform fmt -check
|
||||
continue-on-error: true
|
||||
working-directory: ./terraform/authentik
|
||||
|
||||
- name: Terraform Apply
|
||||
env:
|
||||
TF_VAR_authentik_token: ${{ secrets.AUTHENTIK_TOKEN }}
|
||||
run: terraform apply -var-file proxy-apps.tfvars -var-file oauth2-apps.tfvars -var-file terraform.tfvars -var-file groups.tfvars -input=false -auto-approve -parallelism=100
|
||||
working-directory: ./terraform/authentik
|
||||
|
||||
- name: Generate Wiki Content
|
||||
if: success()
|
||||
continue-on-error: true
|
||||
env:
|
||||
TF_VAR_authentik_token: ${{ secrets.AUTHENTIK_TOKEN }}
|
||||
run: |
|
||||
echo "📋 Starting Wiki generation..."
|
||||
cd ./terraform/authentik
|
||||
|
||||
@@ -22,13 +22,12 @@ jobs:
|
||||
|
||||
- name: Install Python dependencies
|
||||
run: |
|
||||
python3 -m venv .venv
|
||||
.venv/bin/pip install pyyaml
|
||||
pip install pyyaml
|
||||
|
||||
- name: Generate K8s Services Wiki
|
||||
run: |
|
||||
echo "📋 Starting K8s wiki generation..."
|
||||
.venv/bin/python .gitea/scripts/generate-k8s-wiki.py k8s/ Kubernetes-Services.md
|
||||
python3 .gitea/scripts/generate-k8s-wiki.py k8s/ Kubernetes-Services.md
|
||||
|
||||
if [ -f "Kubernetes-Services.md" ]; then
|
||||
echo "✅ Wiki content generated successfully"
|
||||
|
||||
@@ -48,7 +48,6 @@ ArgoCD homelab project
|
||||
| **paperless** | [](https://ag.hexor.cy/applications/argocd/paperless) |
|
||||
| **pasarguard** | [](https://ag.hexor.cy/applications/argocd/pasarguard) |
|
||||
| **qbittorent-nas** | [](https://ag.hexor.cy/applications/argocd/qbittorent-nas) |
|
||||
| **remnawave** | [](https://ag.hexor.cy/applications/argocd/remnawave) |
|
||||
| **rustdesk** | [](https://ag.hexor.cy/applications/argocd/rustdesk) |
|
||||
| **sonarr-stack** | [](https://ag.hexor.cy/applications/argocd/sonarr-stack) |
|
||||
| **stirling-pdf** | [](https://ag.hexor.cy/applications/argocd/stirling-pdf) |
|
||||
@@ -56,7 +55,6 @@ ArgoCD homelab project
|
||||
| **tg-bots** | [](https://ag.hexor.cy/applications/argocd/tg-bots) |
|
||||
| **vaultwarden** | [](https://ag.hexor.cy/applications/argocd/vaultwarden) |
|
||||
| **vpn** | [](https://ag.hexor.cy/applications/argocd/vpn) |
|
||||
| **xandikos** | [](https://ag.hexor.cy/applications/argocd/xandikos) |
|
||||
|
||||
</td>
|
||||
</tr>
|
||||
|
||||
@@ -36,7 +36,7 @@ spec:
|
||||
cpu: "200m"
|
||||
limits:
|
||||
memory: "2Gi"
|
||||
cpu: "1500m"
|
||||
cpu: "1000m"
|
||||
env:
|
||||
- name: GITEA__service__REGISTER_MANUAL_CONFIRM
|
||||
value: "true"
|
||||
@@ -77,8 +77,8 @@ spec:
|
||||
labels:
|
||||
app: gitea-runner
|
||||
spec:
|
||||
#nodeSelector:
|
||||
# kubernetes.io/hostname: home.homenet
|
||||
nodeSelector:
|
||||
kubernetes.io/hostname: home.homenet
|
||||
volumes:
|
||||
- name: docker-sock
|
||||
hostPath:
|
||||
@@ -90,30 +90,27 @@ spec:
|
||||
affinity:
|
||||
nodeAffinity:
|
||||
preferredDuringSchedulingIgnoredDuringExecution:
|
||||
- weight: 1
|
||||
preference:
|
||||
matchExpressions:
|
||||
- key: kubernetes.io/hostname
|
||||
operator: In
|
||||
values:
|
||||
- home.homenet
|
||||
- weight: 2
|
||||
preference:
|
||||
matchExpressions:
|
||||
- key: kubernetes.io/hostname
|
||||
operator: In
|
||||
values:
|
||||
- master.tail2fe2d.ts.net
|
||||
- weight: 3
|
||||
preference:
|
||||
matchExpressions:
|
||||
- key: kubernetes.io/hostname
|
||||
operator: In
|
||||
values:
|
||||
- it.tail2fe2d.ts.net
|
||||
- ch.tail2fe2d.ts.net
|
||||
- us.tail2fe2d.ts.net
|
||||
|
||||
- home.homenet
|
||||
- weight: 1
|
||||
preference:
|
||||
matchExpressions:
|
||||
- key: kubernetes.io/hostname
|
||||
operator: In
|
||||
values:
|
||||
- master.tail2fe2d.ts.net
|
||||
- weight: 2
|
||||
preference:
|
||||
matchExpressions:
|
||||
- key: kubernetes.io/hostname
|
||||
operator: In
|
||||
values:
|
||||
- nas.homenet
|
||||
requiredDuringSchedulingIgnoredDuringExecution:
|
||||
nodeSelectorTerms:
|
||||
- matchExpressions:
|
||||
@@ -121,9 +118,7 @@ spec:
|
||||
operator: In
|
||||
values:
|
||||
- home.homenet
|
||||
- it.tail2fe2d.ts.net
|
||||
- ch.tail2fe2d.ts.net
|
||||
- us.tail2fe2d.ts.net
|
||||
- nas.homenet
|
||||
- master.tail2fe2d.ts.net
|
||||
containers:
|
||||
- name: gitea-runner
|
||||
@@ -134,7 +129,7 @@ spec:
|
||||
memory: "256Mi"
|
||||
ephemeral-storage: "1Gi" # reserve ephemeral storage
|
||||
limits:
|
||||
cpu: "3000m"
|
||||
cpu: "2000m"
|
||||
memory: "4Gi"
|
||||
ephemeral-storage: "28Gi" # hard cap for /data usage
|
||||
volumeMounts:
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
apiVersion: external-secrets.io/v1
|
||||
apiVersion: external-secrets.io/v1beta1
|
||||
kind: ExternalSecret
|
||||
metadata:
|
||||
name: gitea-runner-token
|
||||
@@ -24,7 +24,7 @@ spec:
|
||||
property: login.password
|
||||
|
||||
---
|
||||
apiVersion: external-secrets.io/v1
|
||||
apiVersion: external-secrets.io/v1beta1
|
||||
kind: ExternalSecret
|
||||
metadata:
|
||||
name: gitea-recapcha-creds
|
||||
|
||||
@@ -30,7 +30,7 @@ spec:
|
||||
cpu: "100m"
|
||||
memory: "256Mi"
|
||||
limits:
|
||||
cpu: "3000m"
|
||||
cpu: "2000m"
|
||||
memory: "1Gi"
|
||||
volumeMounts:
|
||||
- name: data
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
apiVersion: external-secrets.io/v1
|
||||
apiVersion: external-secrets.io/v1beta1
|
||||
kind: ExternalSecret
|
||||
metadata:
|
||||
name: greece-notifier-creds
|
||||
|
||||
@@ -30,7 +30,7 @@ spec:
|
||||
cpu: "50m"
|
||||
limits:
|
||||
memory: "128Mi"
|
||||
cpu: "300m"
|
||||
cpu: "200m"
|
||||
command:
|
||||
- git
|
||||
- clone
|
||||
@@ -49,7 +49,7 @@ spec:
|
||||
cpu: "50m"
|
||||
limits:
|
||||
memory: "256Mi"
|
||||
cpu: "300m"
|
||||
cpu: "200m"
|
||||
volumeMounts:
|
||||
- name: hexound-repo
|
||||
mountPath: /var/www/html
|
||||
|
||||
@@ -23,7 +23,7 @@ spec:
|
||||
cpu: "500m"
|
||||
limits:
|
||||
memory: "4Gi"
|
||||
cpu: "3000m"
|
||||
cpu: "2000m"
|
||||
ports:
|
||||
- containerPort: 2283
|
||||
env:
|
||||
@@ -160,7 +160,7 @@ spec:
|
||||
cpu: "1000m"
|
||||
limits:
|
||||
memory: "8Gi"
|
||||
cpu: "6000m"
|
||||
cpu: "4000m"
|
||||
env:
|
||||
- name: TZ
|
||||
value: Asia/Nicosia
|
||||
@@ -201,7 +201,7 @@ spec:
|
||||
cpu: "100m"
|
||||
limits:
|
||||
memory: "512Mi"
|
||||
cpu: "750m"
|
||||
cpu: "500m"
|
||||
readinessProbe:
|
||||
exec:
|
||||
command: ["redis-cli", "ping"]
|
||||
|
||||
@@ -11,61 +11,23 @@ spec:
|
||||
labels:
|
||||
app: iperf3-server
|
||||
spec:
|
||||
serviceAccountName: iperf3-server
|
||||
subdomain: iperf3
|
||||
initContainers:
|
||||
- name: create-service
|
||||
image: bitnami/kubectl:latest
|
||||
- name: set-hostname
|
||||
image: busybox:1.35
|
||||
command: ['sh', '-c']
|
||||
args:
|
||||
- |
|
||||
NODE_NAME=$(echo $NODE_NAME | cut -d'.' -f1 | tr '[:upper:]' '[:lower:]')
|
||||
echo "iperf3-${NODE_NAME}" > /etc/hostname
|
||||
hostname "iperf3-${NODE_NAME}"
|
||||
securityContext:
|
||||
privileged: true
|
||||
env:
|
||||
- name: NODE_NAME
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: spec.nodeName
|
||||
- name: POD_IP
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: status.podIP
|
||||
command:
|
||||
- /bin/bash
|
||||
- -c
|
||||
- |
|
||||
# Clean node name for service name
|
||||
NODE_CLEAN=$(echo "$NODE_NAME" | cut -d'.' -f1 | tr '[:upper:]' '[:lower:]' | tr '_' '-')
|
||||
SERVICE_NAME="iperf3-${NODE_CLEAN}"
|
||||
|
||||
# Create service for this pod
|
||||
kubectl apply -f - <<EOF
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: ${SERVICE_NAME}
|
||||
namespace: iperf3
|
||||
labels:
|
||||
app: iperf3-node-service
|
||||
target-node: "${NODE_NAME}"
|
||||
spec:
|
||||
type: ClusterIP
|
||||
ports:
|
||||
- name: iperf3
|
||||
port: 5201
|
||||
protocol: TCP
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Endpoints
|
||||
metadata:
|
||||
name: ${SERVICE_NAME}
|
||||
namespace: iperf3
|
||||
labels:
|
||||
app: iperf3-node-service
|
||||
target-node: "${NODE_NAME}"
|
||||
subsets:
|
||||
- addresses:
|
||||
- ip: ${POD_IP}
|
||||
ports:
|
||||
- name: iperf3
|
||||
port: 5201
|
||||
protocol: TCP
|
||||
EOF
|
||||
containers:
|
||||
- name: iperf3-server
|
||||
image: networkstatic/iperf3:latest
|
||||
@@ -78,13 +40,15 @@ spec:
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: spec.nodeName
|
||||
- name: HOSTNAME
|
||||
value: $(NODE_NAME)
|
||||
resources:
|
||||
requests:
|
||||
memory: "64Mi"
|
||||
cpu: "100m"
|
||||
limits:
|
||||
memory: "256Mi"
|
||||
cpu: "750m"
|
||||
cpu: "500m"
|
||||
tolerations:
|
||||
- effect: NoSchedule
|
||||
operator: Exists
|
||||
|
||||
@@ -14,58 +14,6 @@ spec:
|
||||
labels:
|
||||
app: iperf3-exporter
|
||||
spec:
|
||||
serviceAccountName: iperf3-server
|
||||
initContainers:
|
||||
- name: create-exporter-service
|
||||
image: bitnami/kubectl:latest
|
||||
env:
|
||||
- name: NODE_NAME
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: spec.nodeName
|
||||
- name: POD_IP
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: status.podIP
|
||||
command:
|
||||
- /bin/bash
|
||||
- -c
|
||||
- |
|
||||
NODE_CLEAN=$(echo "$NODE_NAME" | cut -d'.' -f1 | tr '[:upper:]' '[:lower:]' | tr '_' '-')
|
||||
SERVICE_NAME="iperf3-exporter-${NODE_CLEAN}"
|
||||
|
||||
kubectl apply -f - <<EOF
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: ${SERVICE_NAME}
|
||||
namespace: iperf3
|
||||
labels:
|
||||
app: iperf3-exporter-service
|
||||
target-node: "${NODE_NAME}"
|
||||
spec:
|
||||
type: ClusterIP
|
||||
ports:
|
||||
- name: metrics
|
||||
port: 9579
|
||||
protocol: TCP
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Endpoints
|
||||
metadata:
|
||||
name: ${SERVICE_NAME}
|
||||
namespace: iperf3
|
||||
labels:
|
||||
app: iperf3-exporter-service
|
||||
target-node: "${NODE_NAME}"
|
||||
subsets:
|
||||
- addresses:
|
||||
- ip: ${POD_IP}
|
||||
ports:
|
||||
- name: metrics
|
||||
port: 9579
|
||||
protocol: TCP
|
||||
EOF
|
||||
containers:
|
||||
- name: iperf3-exporter
|
||||
image: ghcr.io/edgard/iperf3_exporter:1.2.2
|
||||
@@ -79,7 +27,7 @@ spec:
|
||||
cpu: "50m"
|
||||
limits:
|
||||
memory: "128Mi"
|
||||
cpu: "300m"
|
||||
cpu: "200m"
|
||||
env:
|
||||
- name: NODE_NAME
|
||||
valueFrom:
|
||||
|
||||
@@ -2,7 +2,6 @@ apiVersion: kustomize.config.k8s.io/v1beta1
|
||||
kind: Kustomization
|
||||
|
||||
resources:
|
||||
- rbac.yaml
|
||||
- daemonset.yaml
|
||||
- service-headless.yaml
|
||||
- iperf3-exporter-daemonset.yaml
|
||||
|
||||
@@ -1,36 +0,0 @@
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: ServiceAccount
|
||||
metadata:
|
||||
name: iperf3-server
|
||||
namespace: iperf3
|
||||
labels:
|
||||
app: iperf3-server
|
||||
---
|
||||
apiVersion: rbac.authorization.k8s.io/v1
|
||||
kind: Role
|
||||
metadata:
|
||||
name: iperf3-service-manager
|
||||
namespace: iperf3
|
||||
labels:
|
||||
app: iperf3-server
|
||||
rules:
|
||||
- apiGroups: [""]
|
||||
resources: ["services", "endpoints"]
|
||||
verbs: ["get", "list", "create", "update", "patch", "delete"]
|
||||
---
|
||||
apiVersion: rbac.authorization.k8s.io/v1
|
||||
kind: RoleBinding
|
||||
metadata:
|
||||
name: iperf3-service-manager
|
||||
namespace: iperf3
|
||||
labels:
|
||||
app: iperf3-server
|
||||
roleRef:
|
||||
apiGroup: rbac.authorization.k8s.io
|
||||
kind: Role
|
||||
name: iperf3-service-manager
|
||||
subjects:
|
||||
- kind: ServiceAccount
|
||||
name: iperf3-server
|
||||
namespace: iperf3
|
||||
@@ -16,105 +16,19 @@ spec:
|
||||
interval: 5m
|
||||
scrapeTimeout: 30s
|
||||
params:
|
||||
target: ['iperf3-ch.iperf3.svc.cluster.local:5201']
|
||||
period: ['10s']
|
||||
streams: ['4']
|
||||
duration:
|
||||
- "10"
|
||||
streams:
|
||||
- "4"
|
||||
relabelings:
|
||||
- sourceLabels: [__address__]
|
||||
targetLabel: __param_target
|
||||
regex: (.+):9579
|
||||
replacement: iperf3-${1}.iperf3.iperf3.svc.cluster.local:5201
|
||||
- sourceLabels: [__param_target]
|
||||
targetLabel: instance
|
||||
- targetLabel: __address__
|
||||
replacement: iperf3-exporter-ch.iperf3.svc:9579
|
||||
- port: metrics
|
||||
path: /probe
|
||||
interval: 5m
|
||||
scrapeTimeout: 30s
|
||||
params:
|
||||
target: ['iperf3-us.iperf3.svc.cluster.local:5201']
|
||||
period: ['10s']
|
||||
streams: ['4']
|
||||
relabelings:
|
||||
- sourceLabels: [__param_target]
|
||||
targetLabel: instance
|
||||
- targetLabel: __address__
|
||||
replacement: iperf3-exporter-us.iperf3.svc:9579
|
||||
- port: metrics
|
||||
path: /probe
|
||||
interval: 5m
|
||||
scrapeTimeout: 30s
|
||||
params:
|
||||
target: ['iperf3-iris.iperf3.svc.cluster.local:5201']
|
||||
period: ['10s']
|
||||
streams: ['4']
|
||||
relabelings:
|
||||
- sourceLabels: [__param_target]
|
||||
targetLabel: instance
|
||||
- targetLabel: __address__
|
||||
replacement: iperf3-exporter-iris.iperf3.svc:9579
|
||||
- port: metrics
|
||||
path: /probe
|
||||
interval: 5m
|
||||
scrapeTimeout: 30s
|
||||
params:
|
||||
target: ['iperf3-home.iperf3.svc.cluster.local:5201']
|
||||
period: ['10s']
|
||||
streams: ['4']
|
||||
relabelings:
|
||||
- sourceLabels: [__param_target]
|
||||
targetLabel: instance
|
||||
- targetLabel: __address__
|
||||
replacement: iperf3-exporter-home.iperf3.svc:9579
|
||||
- port: metrics
|
||||
path: /probe
|
||||
interval: 5m
|
||||
scrapeTimeout: 30s
|
||||
params:
|
||||
target: ['iperf3-master.iperf3.svc.cluster.local:5201']
|
||||
period: ['10s']
|
||||
streams: ['4']
|
||||
relabelings:
|
||||
- sourceLabels: [__param_target]
|
||||
targetLabel: instance
|
||||
- targetLabel: __address__
|
||||
replacement: iperf3-exporter-master.iperf3.svc:9579
|
||||
- port: metrics
|
||||
path: /probe
|
||||
interval: 5m
|
||||
scrapeTimeout: 30s
|
||||
params:
|
||||
target: ['iperf3-it.iperf3.svc.cluster.local:5201']
|
||||
period: ['10s']
|
||||
streams: ['4']
|
||||
relabelings:
|
||||
- sourceLabels: [__param_target]
|
||||
targetLabel: instance
|
||||
- targetLabel: __address__
|
||||
replacement: iperf3-exporter-it.iperf3.svc:9579
|
||||
- port: metrics
|
||||
path: /probe
|
||||
interval: 5m
|
||||
scrapeTimeout: 30s
|
||||
params:
|
||||
target: ['iperf3-nas.iperf3.svc.cluster.local:5201']
|
||||
period: ['10s']
|
||||
streams: ['4']
|
||||
relabelings:
|
||||
- sourceLabels: [__param_target]
|
||||
targetLabel: instance
|
||||
- targetLabel: __address__
|
||||
replacement: iperf3-exporter-nas.iperf3.svc:9579
|
||||
- port: metrics
|
||||
path: /probe
|
||||
interval: 5m
|
||||
scrapeTimeout: 30s
|
||||
params:
|
||||
target: ['iperf3-spb.iperf3.svc.cluster.local:5201']
|
||||
period: ['10s']
|
||||
streams: ['4']
|
||||
relabelings:
|
||||
- sourceLabels: [__param_target]
|
||||
targetLabel: instance
|
||||
- targetLabel: __address__
|
||||
replacement: iperf3-exporter-spb.iperf3.svc:9579
|
||||
replacement: iperf3-exporter.iperf3.svc.cluster.local:9579
|
||||
metricRelabelings:
|
||||
- sourceLabels: [__name__]
|
||||
regex: iperf3_(.+)
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
apiVersion: external-secrets.io/v1
|
||||
apiVersion: external-secrets.io/v1beta1
|
||||
kind: ExternalSecret
|
||||
metadata:
|
||||
name: vpn-creds
|
||||
@@ -76,14 +76,11 @@ spec:
|
||||
secretKeyRef:
|
||||
name: vpn-creds
|
||||
key: ss_link
|
||||
command: ["/bin/bash", "-c", "rm /etc/shadowsocks-rust/config.json && sslocal --server-url $SS_LINK --local-addr 127.0.0.1:8081 -U --protocol http"]
|
||||
command: ["/bin/bash", "-c", "rm /etc/shadowsocks-rust/config.json && sslocal --online-config-url $SS_LINK --local-addr 127.0.0.1:8081 -U --protocol http"]
|
||||
resources:
|
||||
requests:
|
||||
memory: "64Mi"
|
||||
cpu: "300m"
|
||||
limits:
|
||||
memory: "128Mi"
|
||||
cpu: "300m"
|
||||
cpu: "200m"
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
image:
|
||||
tag: 10.11.4
|
||||
tag: 10.10.7
|
||||
resources:
|
||||
requests:
|
||||
memory: "2Gi"
|
||||
cpu: "1000m"
|
||||
limits:
|
||||
memory: "8Gi"
|
||||
cpu: "6000m"
|
||||
cpu: "4000m"
|
||||
nodeSelector:
|
||||
kubernetes.io/hostname: master.tail2fe2d.ts.net
|
||||
persistence:
|
||||
@@ -36,40 +36,8 @@ ingress:
|
||||
paths:
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
- host: us.hexor.cy
|
||||
paths:
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
- host: ch.hexor.cy
|
||||
paths:
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
- host: jp.hexor.cy
|
||||
paths:
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
- host: spb.hexor.cy
|
||||
paths:
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
- host: cy.hexor.cy
|
||||
paths:
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
- host: am.hexor.cy
|
||||
paths:
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
- host: de.hexor.cy
|
||||
paths:
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
- host: it.hexor.cy
|
||||
paths:
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
tls:
|
||||
- secretName: jellyfin-tls
|
||||
hosts:
|
||||
- '*.hexor.cy'
|
||||
- 'jf.hexor.cy'
|
||||
|
||||
|
||||
@@ -19,7 +19,7 @@ spec:
|
||||
kubernetes.io/os: linux
|
||||
containers:
|
||||
- name: secret-reader
|
||||
image: ultradesu/k8s-secrets:0.2.1
|
||||
image: ultradesu/k8s-secrets:0.1.1
|
||||
imagePullPolicy: Always
|
||||
args:
|
||||
- "--secrets"
|
||||
@@ -28,7 +28,6 @@ spec:
|
||||
- "k8s-secret"
|
||||
- "--port"
|
||||
- "3000"
|
||||
- "--webhook"
|
||||
ports:
|
||||
- containerPort: 3000
|
||||
name: http
|
||||
@@ -41,7 +40,7 @@ spec:
|
||||
cpu: "50m"
|
||||
limits:
|
||||
memory: "128Mi"
|
||||
cpu: "150m"
|
||||
cpu: "100m"
|
||||
livenessProbe:
|
||||
httpGet:
|
||||
path: /health
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
apiVersion: external-secrets.io/v1
|
||||
apiVersion: external-secrets.io/v1beta1
|
||||
kind: ExternalSecret
|
||||
metadata:
|
||||
name: openai-creds
|
||||
|
||||
@@ -29,7 +29,7 @@ spec:
|
||||
cpu: "100m"
|
||||
limits:
|
||||
memory: "1Gi"
|
||||
cpu: "750m"
|
||||
cpu: "500m"
|
||||
command:
|
||||
- /bin/sh
|
||||
- -c
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
apiVersion: external-secrets.io/v1
|
||||
apiVersion: external-secrets.io/v1beta1
|
||||
kind: ExternalSecret
|
||||
metadata:
|
||||
name: khm-pg-creds
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
apiVersion: external-secrets.io/v1
|
||||
apiVersion: external-secrets.io/v1beta1
|
||||
kind: ExternalSecret
|
||||
metadata:
|
||||
name: oidc-secret
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
apiVersion: external-secrets.io/v1
|
||||
apiVersion: external-secrets.io/v1beta1
|
||||
kind: ExternalSecret
|
||||
metadata:
|
||||
name: postgres-and-oauth-creds
|
||||
|
||||
@@ -13,7 +13,7 @@ resources:
|
||||
cpu: "200m"
|
||||
limits:
|
||||
memory: "2Gi"
|
||||
cpu: "1500m"
|
||||
cpu: "1000m"
|
||||
|
||||
service:
|
||||
type: ClusterIP
|
||||
|
||||
@@ -27,11 +27,4 @@ helmCharts:
|
||||
namespace: paperless
|
||||
valuesFile: gotenberg-values.yaml
|
||||
includeCRDs: true
|
||||
#- name: redis
|
||||
# repo: oci://registry-1.docker.io/bitnamicharts/redis
|
||||
# version: 24.1.0
|
||||
# releaseName: redis
|
||||
# namespace: paperless
|
||||
# includeCRDs: true
|
||||
# valuesFile: bazarr-values.yaml
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ resources:
|
||||
cpu: "500m"
|
||||
limits:
|
||||
memory: "4Gi"
|
||||
cpu: "3000m"
|
||||
cpu: "2000m"
|
||||
initContainers:
|
||||
install-tesseract-langs:
|
||||
image: ghcr.io/paperless-ngx/paperless-ngx:2.18.2
|
||||
@@ -16,7 +16,7 @@ initContainers:
|
||||
cpu: "100m"
|
||||
limits:
|
||||
memory: "1Gi"
|
||||
cpu: "750m"
|
||||
cpu: "500m"
|
||||
command: ["/bin/sh", "-c"]
|
||||
args:
|
||||
- apt-get update && apt-get install -y --reinstall tesseract-ocr-rus tesseract-ocr-jpn tesseract-ocr-chi-sim tesseract-ocr-eng tesseract-ocr-ell && cp -v -r /usr/share/tesseract-ocr/5/tessdata/* /custom-tessdata/
|
||||
@@ -107,8 +107,6 @@ persistence:
|
||||
- path: /usr/src/paperless/consume
|
||||
redis:
|
||||
enabled: true
|
||||
image:
|
||||
tag: latest
|
||||
master:
|
||||
nodeSelector:
|
||||
kubernetes.io/hostname: nas.homenet
|
||||
|
||||
@@ -13,7 +13,7 @@ resources:
|
||||
cpu: "100m"
|
||||
limits:
|
||||
memory: "1Gi"
|
||||
cpu: "750m"
|
||||
cpu: "500m"
|
||||
|
||||
service:
|
||||
type: ClusterIP
|
||||
|
||||
@@ -1,212 +0,0 @@
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: ConfigMap
|
||||
metadata:
|
||||
name: pasarguard-scripts-ingress
|
||||
labels:
|
||||
app: pasarguard-node-ingress
|
||||
data:
|
||||
init-uuid-ingress.sh: |
|
||||
#!/bin/bash
|
||||
set -e
|
||||
echo "Started"
|
||||
# NODE_NAME is already set via environment variable
|
||||
NAMESPACE=$(cat /var/run/secrets/kubernetes.io/serviceaccount/namespace)
|
||||
|
||||
# Get DNS name from node label xray-public-address
|
||||
DNS_NAME=$(kubectl get node "${NODE_NAME}" -o jsonpath='{.metadata.labels.xray-public-address}')
|
||||
|
||||
if [ -z "${DNS_NAME}" ]; then
|
||||
echo "ERROR: Node ${NODE_NAME} does not have label 'xray-public-address'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Node: ${NODE_NAME}"
|
||||
echo "DNS Name from label: ${DNS_NAME}"
|
||||
|
||||
# Use DNS name for ConfigMap name to ensure uniqueness
|
||||
CONFIGMAP_NAME="node-uuid-ingress-${DNS_NAME//./-}"
|
||||
|
||||
echo "Checking ConfigMap: ${CONFIGMAP_NAME}"
|
||||
|
||||
# Check if ConfigMap exists and get UUID
|
||||
if kubectl get configmap "${CONFIGMAP_NAME}" -n "${NAMESPACE}" &>/dev/null; then
|
||||
echo "ConfigMap exists, reading UUID..."
|
||||
API_KEY=$(kubectl get configmap "${CONFIGMAP_NAME}" -n "${NAMESPACE}" -o jsonpath='{.data.API_KEY}')
|
||||
|
||||
if [ -z "${API_KEY}" ]; then
|
||||
echo "UUID not found in ConfigMap, generating new one..."
|
||||
API_KEY=$(cat /proc/sys/kernel/random/uuid)
|
||||
kubectl patch configmap "${CONFIGMAP_NAME}" -n "${NAMESPACE}" --type merge -p "{\"data\":{\"API_KEY\":\"${API_KEY}\"}}"
|
||||
else
|
||||
echo "Using existing UUID from ConfigMap"
|
||||
fi
|
||||
else
|
||||
echo "ConfigMap does not exist, creating new one..."
|
||||
API_KEY=$(cat /proc/sys/kernel/random/uuid)
|
||||
kubectl create configmap "${CONFIGMAP_NAME}" -n "${NAMESPACE}" \
|
||||
--from-literal=API_KEY="${API_KEY}" \
|
||||
--from-literal=NODE_NAME="${NODE_NAME}"
|
||||
fi
|
||||
|
||||
# Save UUID and node info to shared volume for the main container
|
||||
echo -n "${API_KEY}" > /shared/api-key
|
||||
echo -n "${NODE_NAME}" > /shared/node-name
|
||||
echo -n "${CONFIGMAP_NAME}" > /shared/configmap-name
|
||||
echo "UUID initialized: ${API_KEY}"
|
||||
echo "Node name: ${NODE_NAME}"
|
||||
echo "ConfigMap: ${CONFIGMAP_NAME}"
|
||||
|
||||
# Create Certificate for this node using DNS name from label
|
||||
CERT_NAME="pasarguard-node-ingress-${DNS_NAME//./-}"
|
||||
|
||||
echo "Creating Certificate: ${CERT_NAME} for ${DNS_NAME}"
|
||||
|
||||
# Check if Certificate already exists
|
||||
if ! kubectl get certificate "${CERT_NAME}" -n "${NAMESPACE}" &>/dev/null; then
|
||||
echo "Certificate does not exist, creating..."
|
||||
cat <<EOF | kubectl apply -f -
|
||||
apiVersion: cert-manager.io/v1
|
||||
kind: Certificate
|
||||
metadata:
|
||||
name: ${CERT_NAME}
|
||||
namespace: ${NAMESPACE}
|
||||
spec:
|
||||
secretName: ${CERT_NAME}-tls
|
||||
issuerRef:
|
||||
name: letsencrypt
|
||||
kind: ClusterIssuer
|
||||
dnsNames:
|
||||
- ${DNS_NAME}
|
||||
EOF
|
||||
else
|
||||
echo "Certificate already exists"
|
||||
fi
|
||||
|
||||
# Wait for certificate to be ready
|
||||
|
||||
echo "Waiting for certificate to be ready..."
|
||||
for i in {1..600}; do
|
||||
if kubectl get secret "${CERT_NAME}-tls" -n "${NAMESPACE}" &>/dev/null; then
|
||||
echo "Certificate secret is ready!"
|
||||
break
|
||||
fi
|
||||
echo "Waiting for certificate... ($i/600)"
|
||||
sleep 1
|
||||
done
|
||||
|
||||
if ! kubectl get secret "${CERT_NAME}-tls" -n "${NAMESPACE}" &>/dev/null; then
|
||||
echo "WARNING: Certificate secret not ready after 600 seconds"
|
||||
else
|
||||
# Extract certificate and key from secret to shared volume
|
||||
echo "Extracting certificate and key..."
|
||||
kubectl get secret "${CERT_NAME}-tls" -n "${NAMESPACE}" -o jsonpath='{.data.tls\.crt}' | base64 -d > /shared/tls.crt
|
||||
kubectl get secret "${CERT_NAME}-tls" -n "${NAMESPACE}" -o jsonpath='{.data.tls\.key}' | base64 -d > /shared/tls.key
|
||||
echo "Certificate and key extracted successfully."
|
||||
cat /shared/tls.crt
|
||||
fi
|
||||
|
||||
# Create ClusterIP Service for this node (pod selector based)
|
||||
NODE_SHORT_NAME="${NODE_NAME%%.*}"
|
||||
SERVICE_NAME="${NODE_SHORT_NAME}-ingress"
|
||||
|
||||
echo "Creating Service: ${SERVICE_NAME} for node ${NODE_NAME} (short: ${NODE_SHORT_NAME})"
|
||||
|
||||
# Create Service with pod selector including node name
|
||||
cat <<EOF | kubectl apply -f -
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: ${SERVICE_NAME}
|
||||
namespace: ${NAMESPACE}
|
||||
labels:
|
||||
app: pasarguard-node-ingress
|
||||
node: ${NODE_NAME}
|
||||
spec:
|
||||
type: ClusterIP
|
||||
selector:
|
||||
app: pasarguard-node-ingress
|
||||
node-name: ${NODE_SHORT_NAME}
|
||||
ports:
|
||||
- name: proxy
|
||||
port: 443
|
||||
protocol: TCP
|
||||
targetPort: 443
|
||||
- name: api
|
||||
port: 62050
|
||||
protocol: TCP
|
||||
targetPort: 62050
|
||||
EOF
|
||||
|
||||
echo "Service created: ${SERVICE_NAME}.${NAMESPACE}.svc.cluster.local"
|
||||
|
||||
# Create IngressRouteTCP for this DNS name with TLS passthrough
|
||||
INGRESS_NAME="pasarguard-tcp-${DNS_NAME//./-}"
|
||||
|
||||
echo "Creating IngressRouteTCP: ${INGRESS_NAME} for ${DNS_NAME}"
|
||||
|
||||
cat <<EOF | kubectl apply -f -
|
||||
apiVersion: traefik.io/v1alpha1
|
||||
kind: IngressRouteTCP
|
||||
metadata:
|
||||
name: ${INGRESS_NAME}
|
||||
namespace: ${NAMESPACE}
|
||||
labels:
|
||||
app: pasarguard-node-ingress
|
||||
node: ${NODE_NAME}
|
||||
spec:
|
||||
entryPoints:
|
||||
- websecure
|
||||
routes:
|
||||
- match: HostSNI(\`${DNS_NAME}\`)
|
||||
services:
|
||||
- name: ${SERVICE_NAME}
|
||||
port: 443
|
||||
tls:
|
||||
passthrough: true
|
||||
EOF
|
||||
|
||||
echo "IngressRouteTCP created: ${INGRESS_NAME}"
|
||||
echo "Traffic to ${DNS_NAME}:443 will be routed to ${SERVICE_NAME}:443"
|
||||
|
||||
# Create second IngressRouteTCP for API port 62051
|
||||
INGRESS_API_NAME="pasarguard-api-${DNS_NAME//./-}"
|
||||
|
||||
echo "Creating IngressRouteTCP for API: ${INGRESS_API_NAME} for ${DNS_NAME}:62051"
|
||||
|
||||
cat <<EOF | kubectl apply -f -
|
||||
apiVersion: traefik.io/v1alpha1
|
||||
kind: IngressRouteTCP
|
||||
metadata:
|
||||
name: ${INGRESS_API_NAME}
|
||||
namespace: ${NAMESPACE}
|
||||
labels:
|
||||
app: pasarguard-node-ingress
|
||||
node: ${NODE_NAME}
|
||||
spec:
|
||||
entryPoints:
|
||||
- pasarguard-api
|
||||
routes:
|
||||
- match: HostSNI(\`${DNS_NAME}\`)
|
||||
services:
|
||||
- name: ${SERVICE_NAME}
|
||||
port: 62050
|
||||
tls:
|
||||
passthrough: true
|
||||
EOF
|
||||
|
||||
echo "IngressRouteTCP API created: ${INGRESS_API_NAME}"
|
||||
echo "Traffic to ${DNS_NAME}:62051 will be routed to ${SERVICE_NAME}:62050"
|
||||
|
||||
pasarguard-start.sh: |
|
||||
#!/bin/sh
|
||||
# Read API_KEY from shared volume created by init container
|
||||
if [ -f /shared/api-key ]; then
|
||||
export API_KEY=$(cat /shared/api-key)
|
||||
echo "Loaded API_KEY from shared volume"
|
||||
else
|
||||
echo "WARNING: API_KEY file not found, using default"
|
||||
fi
|
||||
|
||||
cd /app
|
||||
exec ./main
|
||||
@@ -1,211 +0,0 @@
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: ServiceAccount
|
||||
metadata:
|
||||
name: pasarguard-node-ingress
|
||||
labels:
|
||||
app: pasarguard-node-ingress
|
||||
---
|
||||
apiVersion: rbac.authorization.k8s.io/v1
|
||||
kind: Role
|
||||
metadata:
|
||||
name: pasarguard-node-ingress-configmap
|
||||
labels:
|
||||
app: pasarguard-node-ingress
|
||||
rules:
|
||||
- apiGroups: [""]
|
||||
resources: ["configmaps"]
|
||||
verbs: ["get", "list", "create", "update", "patch"]
|
||||
- apiGroups: ["cert-manager.io"]
|
||||
resources: ["certificates"]
|
||||
verbs: ["get", "list", "create", "update", "patch", "delete"]
|
||||
- apiGroups: [""]
|
||||
resources: ["secrets"]
|
||||
verbs: ["get", "list"]
|
||||
- apiGroups: [""]
|
||||
resources: ["services", "endpoints"]
|
||||
verbs: ["get", "list", "create", "update", "patch", "delete"]
|
||||
- apiGroups: ["traefik.io", "traefik.containo.us"]
|
||||
resources: ["ingressroutetcps"]
|
||||
verbs: ["get", "list", "create", "update", "patch", "delete"]
|
||||
- apiGroups: [""]
|
||||
resources: ["pods"]
|
||||
verbs: ["get", "list", "patch", "update"]
|
||||
---
|
||||
apiVersion: rbac.authorization.k8s.io/v1
|
||||
kind: RoleBinding
|
||||
metadata:
|
||||
name: pasarguard-node-ingress-configmap
|
||||
labels:
|
||||
app: pasarguard-node-ingress
|
||||
roleRef:
|
||||
apiGroup: rbac.authorization.k8s.io
|
||||
kind: Role
|
||||
name: pasarguard-node-ingress-configmap
|
||||
subjects:
|
||||
- kind: ServiceAccount
|
||||
name: pasarguard-node-ingress
|
||||
---
|
||||
apiVersion: rbac.authorization.k8s.io/v1
|
||||
kind: ClusterRole
|
||||
metadata:
|
||||
name: pasarguard-node-ingress-reader
|
||||
labels:
|
||||
app: pasarguard-node-ingress
|
||||
rules:
|
||||
- apiGroups: [""]
|
||||
resources: ["nodes"]
|
||||
verbs: ["get", "list"]
|
||||
---
|
||||
apiVersion: rbac.authorization.k8s.io/v1
|
||||
kind: ClusterRoleBinding
|
||||
metadata:
|
||||
name: pasarguard-node-ingress-reader
|
||||
labels:
|
||||
app: pasarguard-node-ingress
|
||||
roleRef:
|
||||
apiGroup: rbac.authorization.k8s.io
|
||||
kind: ClusterRole
|
||||
name: pasarguard-node-ingress-reader
|
||||
subjects:
|
||||
- kind: ServiceAccount
|
||||
name: pasarguard-node-ingress
|
||||
namespace: pasarguard
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: DaemonSet
|
||||
metadata:
|
||||
name: pasarguard-node-ingress
|
||||
labels:
|
||||
app: pasarguard-node-ingress
|
||||
spec:
|
||||
selector:
|
||||
matchLabels:
|
||||
app: pasarguard-node-ingress
|
||||
revisionHistoryLimit: 3
|
||||
updateStrategy:
|
||||
type: RollingUpdate
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: pasarguard-node-ingress
|
||||
spec:
|
||||
serviceAccountName: pasarguard-node-ingress
|
||||
affinity:
|
||||
nodeAffinity:
|
||||
requiredDuringSchedulingIgnoredDuringExecution:
|
||||
nodeSelectorTerms:
|
||||
- matchExpressions:
|
||||
- key: xray-public-address
|
||||
operator: Exists
|
||||
initContainers:
|
||||
- name: label-pod
|
||||
image: bitnami/kubectl:latest
|
||||
env:
|
||||
- name: POD_NAME
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: metadata.name
|
||||
- name: POD_NAMESPACE
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: metadata.namespace
|
||||
- name: NODE_NAME
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: spec.nodeName
|
||||
command:
|
||||
- /bin/bash
|
||||
- -c
|
||||
- |
|
||||
# Add node label to pod
|
||||
NODE_SHORT=$(echo ${NODE_NAME} | cut -d. -f1)
|
||||
kubectl label pod ${POD_NAME} -n ${POD_NAMESPACE} node-name=${NODE_SHORT} --overwrite
|
||||
- name: init-uuid
|
||||
image: bitnami/kubectl:latest
|
||||
env:
|
||||
- name: GODEBUG
|
||||
value: "x509sha1=1"
|
||||
- name: NODE_NAME
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: spec.nodeName
|
||||
command:
|
||||
- /bin/bash
|
||||
- /scripts/init-uuid-ingress.sh
|
||||
volumeMounts:
|
||||
- name: shared-data
|
||||
mountPath: /shared
|
||||
- name: scripts
|
||||
mountPath: /scripts
|
||||
containers:
|
||||
- name: pasarguard-node
|
||||
image: 'pasarguard/node:v0.1.3'
|
||||
imagePullPolicy: Always
|
||||
command:
|
||||
- /bin/sh
|
||||
- /scripts/pasarguard-start.sh
|
||||
ports:
|
||||
- name: api
|
||||
containerPort: 62050
|
||||
protocol: TCP
|
||||
- name: proxy
|
||||
containerPort: 443
|
||||
protocol: TCP
|
||||
env:
|
||||
- name: NODE_NAME
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: spec.nodeName
|
||||
- name: NODE_HOST
|
||||
value: "0.0.0.0"
|
||||
- name: SERVICE_PORT
|
||||
value: "62050"
|
||||
- name: SERVICE_PROTOCOL
|
||||
value: "grpc"
|
||||
- name: DEBUG
|
||||
value: "true"
|
||||
- name: SSL_CERT_FILE
|
||||
value: "/shared/tls.crt"
|
||||
- name: SSL_KEY_FILE
|
||||
value: "/shared/tls.key"
|
||||
- name: XRAY_EXECUTABLE_PATH
|
||||
value: "/usr/local/bin/xray"
|
||||
- name: XRAY_ASSETS_PATH
|
||||
value: "/usr/local/share/xray"
|
||||
- name: API_KEY
|
||||
value: "change-this-to-a-secure-uuid"
|
||||
livenessProbe:
|
||||
tcpSocket:
|
||||
port: 62050
|
||||
initialDelaySeconds: 30
|
||||
periodSeconds: 10
|
||||
timeoutSeconds: 5
|
||||
failureThreshold: 3
|
||||
readinessProbe:
|
||||
tcpSocket:
|
||||
port: 62050
|
||||
initialDelaySeconds: 10
|
||||
periodSeconds: 5
|
||||
timeoutSeconds: 3
|
||||
failureThreshold: 3
|
||||
resources:
|
||||
requests:
|
||||
memory: "128Mi"
|
||||
cpu: "300m"
|
||||
limits:
|
||||
memory: "512Mi"
|
||||
cpu: "1000m"
|
||||
volumeMounts:
|
||||
- name: shared-data
|
||||
mountPath: /shared
|
||||
readOnly: false
|
||||
- name: scripts
|
||||
mountPath: /scripts
|
||||
volumes:
|
||||
- name: shared-data
|
||||
emptyDir: {}
|
||||
- name: scripts
|
||||
configMap:
|
||||
name: pasarguard-scripts-ingress
|
||||
defaultMode: 0755
|
||||
@@ -112,8 +112,47 @@ spec:
|
||||
- name: scripts
|
||||
mountPath: /scripts
|
||||
containers:
|
||||
- name: xray-exporter
|
||||
image: alpine:3.18
|
||||
imagePullPolicy: IfNotPresent
|
||||
command:
|
||||
- /bin/sh
|
||||
- /scripts/exporter-start.sh
|
||||
ports:
|
||||
- name: metrics
|
||||
containerPort: 9550
|
||||
protocol: TCP
|
||||
livenessProbe:
|
||||
httpGet:
|
||||
path: /scrape
|
||||
port: metrics
|
||||
initialDelaySeconds: 60
|
||||
periodSeconds: 30
|
||||
timeoutSeconds: 10
|
||||
failureThreshold: 3
|
||||
readinessProbe:
|
||||
httpGet:
|
||||
path: /scrape
|
||||
port: metrics
|
||||
initialDelaySeconds: 45
|
||||
periodSeconds: 10
|
||||
timeoutSeconds: 5
|
||||
failureThreshold: 3
|
||||
resources:
|
||||
requests:
|
||||
memory: "64Mi"
|
||||
cpu: "50m"
|
||||
limits:
|
||||
memory: "128Mi"
|
||||
cpu: "100m"
|
||||
volumeMounts:
|
||||
- name: shared-data
|
||||
mountPath: /shared
|
||||
readOnly: true
|
||||
- name: scripts
|
||||
mountPath: /scripts
|
||||
- name: pasarguard-node
|
||||
image: 'pasarguard/node:v0.1.4'
|
||||
image: 'pasarguard/node:v0.1.1'
|
||||
imagePullPolicy: Always
|
||||
command:
|
||||
- /bin/sh
|
||||
@@ -162,56 +201,16 @@ spec:
|
||||
resources:
|
||||
requests:
|
||||
memory: "128Mi"
|
||||
cpu: "500m"
|
||||
cpu: "100m"
|
||||
limits:
|
||||
memory: "512Mi"
|
||||
cpu: "1200m"
|
||||
cpu: "500m"
|
||||
volumeMounts:
|
||||
- name: shared-data
|
||||
mountPath: /shared
|
||||
readOnly: false
|
||||
- name: scripts
|
||||
mountPath: /scripts
|
||||
|
||||
- name: xray-exporter
|
||||
image: alpine:3.18
|
||||
imagePullPolicy: IfNotPresent
|
||||
command:
|
||||
- /bin/sh
|
||||
- /scripts/exporter-start.sh
|
||||
ports:
|
||||
- name: metrics
|
||||
containerPort: 9550
|
||||
protocol: TCP
|
||||
livenessProbe:
|
||||
httpGet:
|
||||
path: /scrape
|
||||
port: metrics
|
||||
initialDelaySeconds: 60
|
||||
periodSeconds: 30
|
||||
timeoutSeconds: 10
|
||||
failureThreshold: 3
|
||||
readinessProbe:
|
||||
httpGet:
|
||||
path: /scrape
|
||||
port: metrics
|
||||
initialDelaySeconds: 45
|
||||
periodSeconds: 10
|
||||
timeoutSeconds: 5
|
||||
failureThreshold: 3
|
||||
resources:
|
||||
requests:
|
||||
memory: "64Mi"
|
||||
cpu: "50m"
|
||||
limits:
|
||||
memory: "128Mi"
|
||||
cpu: "500m"
|
||||
volumeMounts:
|
||||
- name: shared-data
|
||||
mountPath: /shared
|
||||
readOnly: true
|
||||
- name: scripts
|
||||
mountPath: /scripts
|
||||
volumes:
|
||||
- name: shared-data
|
||||
emptyDir: {}
|
||||
|
||||
@@ -34,7 +34,7 @@ spec:
|
||||
mountPath: /templates/subscription
|
||||
containers:
|
||||
- name: pasarguard-web
|
||||
image: 'pasarguard/panel:latest'
|
||||
image: 'pasarguard/panel:v1.7.2'
|
||||
imagePullPolicy: Always
|
||||
envFrom:
|
||||
- secretRef:
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
apiVersion: external-secrets.io/v1
|
||||
apiVersion: external-secrets.io/v1beta1
|
||||
kind: ExternalSecret
|
||||
metadata:
|
||||
name: pasarguard-secrets
|
||||
|
||||
@@ -9,6 +9,3 @@ resources:
|
||||
- ./certificate.yaml
|
||||
- ./configmap-scripts.yaml
|
||||
- ./servicemonitor.yaml
|
||||
- ./configmap-scripts-ingress.yaml
|
||||
# - ./daemonset-ingress.yaml
|
||||
# - ./traefik-pasarguard-entrypoint.yaml
|
||||
|
||||
@@ -1,66 +0,0 @@
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: traefik
|
||||
namespace: kube-system
|
||||
spec:
|
||||
template:
|
||||
spec:
|
||||
containers:
|
||||
- name: traefik
|
||||
args:
|
||||
- --entryPoints.metrics.address=:9100/tcp
|
||||
- --entryPoints.traefik.address=:8080/tcp
|
||||
- --entryPoints.web.address=:8000/tcp
|
||||
- --entryPoints.websecure.address=:8443/tcp
|
||||
- --entryPoints.pasarguard-api.address=:62051/tcp
|
||||
- --api.dashboard=true
|
||||
- --ping=true
|
||||
- --metrics.prometheus=true
|
||||
- --metrics.prometheus.entrypoint=metrics
|
||||
- --providers.kubernetescrd
|
||||
- --providers.kubernetescrd.allowEmptyServices=true
|
||||
- --providers.kubernetesingress
|
||||
- --providers.kubernetesingress.allowEmptyServices=true
|
||||
- --providers.kubernetesingress.ingressendpoint.publishedservice=kube-system/traefik
|
||||
- --entryPoints.websecure.http.tls=true
|
||||
- --log.level=INFO
|
||||
- --entryPoints.web.transport.respondingTimeouts.readTimeout=0s
|
||||
- --entryPoints.websecure.transport.respondingTimeouts.readTimeout=0s
|
||||
ports:
|
||||
- containerPort: 9100
|
||||
name: metrics
|
||||
protocol: TCP
|
||||
- containerPort: 8080
|
||||
name: traefik
|
||||
protocol: TCP
|
||||
- containerPort: 8000
|
||||
name: web
|
||||
protocol: TCP
|
||||
- containerPort: 8443
|
||||
name: websecure
|
||||
protocol: TCP
|
||||
- containerPort: 62051
|
||||
name: pasarguard-api
|
||||
protocol: TCP
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: traefik
|
||||
namespace: kube-system
|
||||
spec:
|
||||
ports:
|
||||
- name: web
|
||||
port: 80
|
||||
protocol: TCP
|
||||
targetPort: web
|
||||
- name: websecure
|
||||
port: 443
|
||||
protocol: TCP
|
||||
targetPort: websecure
|
||||
- name: pasarguard-api
|
||||
port: 62051
|
||||
protocol: TCP
|
||||
targetPort: pasarguard-api
|
||||
@@ -1,21 +0,0 @@
|
||||
apiVersion: argoproj.io/v1alpha1
|
||||
kind: Application
|
||||
metadata:
|
||||
name: remnawave
|
||||
namespace: argocd
|
||||
spec:
|
||||
project: apps
|
||||
destination:
|
||||
namespace: remnawave
|
||||
server: https://kubernetes.default.svc
|
||||
source:
|
||||
repoURL: ssh://git@gt.hexor.cy:30022/ab/homelab.git
|
||||
targetRevision: HEAD
|
||||
path: k8s/apps/remnawave
|
||||
syncPolicy:
|
||||
automated:
|
||||
selfHeal: true
|
||||
prune: true
|
||||
syncOptions:
|
||||
- CreateNamespace=true
|
||||
|
||||
@@ -1,71 +0,0 @@
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: remnawave
|
||||
labels:
|
||||
app: remnawave
|
||||
annotations:
|
||||
reloader.stakater.com/auto: "true"
|
||||
spec:
|
||||
selector:
|
||||
matchLabels:
|
||||
app: remnawave
|
||||
replicas: 1
|
||||
strategy:
|
||||
type: RollingUpdate
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: remnawave
|
||||
spec:
|
||||
containers:
|
||||
- name: remnawave
|
||||
image: 'remnawave/backend:2'
|
||||
imagePullPolicy: Always
|
||||
envFrom:
|
||||
- secretRef:
|
||||
name: remnawave-secrets
|
||||
env:
|
||||
- name: REDIS_URL
|
||||
value: "redis://remnawave-redis:6379"
|
||||
ports:
|
||||
- name: http
|
||||
containerPort: 3000
|
||||
protocol: TCP
|
||||
- name: metrics
|
||||
containerPort: 3001
|
||||
protocol: TCP
|
||||
livenessProbe:
|
||||
httpGet:
|
||||
path: /health
|
||||
port: 3001
|
||||
initialDelaySeconds: 30
|
||||
periodSeconds: 30
|
||||
timeoutSeconds: 5
|
||||
failureThreshold: 3
|
||||
readinessProbe:
|
||||
httpGet:
|
||||
path: /health
|
||||
port: 3001
|
||||
initialDelaySeconds: 10
|
||||
periodSeconds: 10
|
||||
timeoutSeconds: 3
|
||||
failureThreshold: 3
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: remnawave
|
||||
spec:
|
||||
selector:
|
||||
app: remnawave
|
||||
ports:
|
||||
- name: http
|
||||
protocol: TCP
|
||||
port: 3000
|
||||
targetPort: 3000
|
||||
- name: metrics
|
||||
protocol: TCP
|
||||
port: 3001
|
||||
targetPort: 3001
|
||||
@@ -1,70 +0,0 @@
|
||||
---
|
||||
apiVersion: external-secrets.io/v1
|
||||
kind: ExternalSecret
|
||||
metadata:
|
||||
name: remnawave-secrets
|
||||
spec:
|
||||
target:
|
||||
name: remnawave-secrets
|
||||
deletionPolicy: Delete
|
||||
template:
|
||||
type: Opaque
|
||||
data:
|
||||
METRICS_USER: admin
|
||||
FRONT_END_DOMAIN: rw.hexor.cy
|
||||
SUB_PUBLIC_DOMAIN: sub.hexor.cy
|
||||
REDIS_HOST: remnawave-redis
|
||||
REDIS_PORT: "6379"
|
||||
|
||||
DATABASE_URL: |-
|
||||
postgresql://remnawave:{{ .pg_pass }}@psql.psql.svc:5432/remnawave
|
||||
JWT_AUTH_SECRET: |-
|
||||
{{ .jwt_auth_secret }}
|
||||
JWT_API_TOKENS_SECRET: |-
|
||||
{{ .jwt_api_tokens_secret }}
|
||||
METRICS_PASS: |-
|
||||
{{ .metrics_pass }}
|
||||
WEBHOOK_SECRET_HEADER: |-
|
||||
{{ .webhook_secret }}
|
||||
|
||||
data:
|
||||
- secretKey: pg_pass
|
||||
sourceRef:
|
||||
storeRef:
|
||||
name: vaultwarden-login
|
||||
kind: ClusterSecretStore
|
||||
remoteRef:
|
||||
key: 2a9deb39-ef22-433e-a1be-df1555625e22
|
||||
property: fields[10].value
|
||||
- secretKey: jwt_auth_secret
|
||||
sourceRef:
|
||||
storeRef:
|
||||
name: vaultwarden-login
|
||||
kind: ClusterSecretStore
|
||||
remoteRef:
|
||||
key: 0d090436-5e82-453a-914c-19cec2abded1
|
||||
property: fields[0].value
|
||||
- secretKey: jwt_api_tokens_secret
|
||||
sourceRef:
|
||||
storeRef:
|
||||
name: vaultwarden-login
|
||||
kind: ClusterSecretStore
|
||||
remoteRef:
|
||||
key: 0d090436-5e82-453a-914c-19cec2abded1
|
||||
property: fields[1].value
|
||||
- secretKey: metrics_pass
|
||||
sourceRef:
|
||||
storeRef:
|
||||
name: vaultwarden-login
|
||||
kind: ClusterSecretStore
|
||||
remoteRef:
|
||||
key: 0d090436-5e82-453a-914c-19cec2abded1
|
||||
property: fields[2].value
|
||||
- secretKey: webhook_secret
|
||||
sourceRef:
|
||||
storeRef:
|
||||
name: vaultwarden-login
|
||||
kind: ClusterSecretStore
|
||||
remoteRef:
|
||||
key: 0d090436-5e82-453a-914c-19cec2abded1
|
||||
property: fields[3].value
|
||||
@@ -1,12 +0,0 @@
|
||||
apiVersion: kustomize.config.k8s.io/v1beta1
|
||||
kind: Kustomization
|
||||
|
||||
resources:
|
||||
- ./external-secrets.yaml
|
||||
- ./deployment.yaml
|
||||
- ./redis-deployment.yaml
|
||||
- ./subscription-page-configmap.yaml
|
||||
- ./subscription-page-deployment.yaml
|
||||
- ./servicemonitor.yaml
|
||||
- ./user-ui-ingress.yaml
|
||||
- ./panel-ingress.yaml
|
||||
@@ -1,37 +0,0 @@
|
||||
---
|
||||
apiVersion: networking.k8s.io/v1
|
||||
kind: Ingress
|
||||
metadata:
|
||||
name: panel-ui
|
||||
annotations:
|
||||
ingressClassName: traefik
|
||||
cert-manager.io/cluster-issuer: letsencrypt
|
||||
traefik.ingress.kubernetes.io/router.middlewares: kube-system-https-redirect@kubernetescrd
|
||||
acme.cert-manager.io/http01-edit-in-place: "true"
|
||||
spec:
|
||||
rules:
|
||||
- host: rw.hexor.cy
|
||||
http:
|
||||
paths:
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
backend:
|
||||
service:
|
||||
name: remnawave
|
||||
port:
|
||||
number: 3000
|
||||
- host: rw.hexor.ru
|
||||
http:
|
||||
paths:
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
backend:
|
||||
service:
|
||||
name: remnawave
|
||||
port:
|
||||
number: 3000
|
||||
tls:
|
||||
- secretName: remnawave-panel-tls
|
||||
hosts:
|
||||
- rw.hexor.cy
|
||||
- rw.hexor.ru
|
||||
@@ -1,71 +0,0 @@
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: remnawave-redis
|
||||
labels:
|
||||
app: remnawave-redis
|
||||
spec:
|
||||
selector:
|
||||
matchLabels:
|
||||
app: remnawave-redis
|
||||
replicas: 1
|
||||
strategy:
|
||||
type: RollingUpdate
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: remnawave-redis
|
||||
spec:
|
||||
containers:
|
||||
- name: redis
|
||||
image: 'valkey/valkey:8.1-alpine'
|
||||
imagePullPolicy: Always
|
||||
ports:
|
||||
- name: redis
|
||||
containerPort: 6379
|
||||
protocol: TCP
|
||||
livenessProbe:
|
||||
exec:
|
||||
command:
|
||||
- valkey-cli
|
||||
- ping
|
||||
initialDelaySeconds: 30
|
||||
periodSeconds: 10
|
||||
timeoutSeconds: 5
|
||||
failureThreshold: 3
|
||||
readinessProbe:
|
||||
exec:
|
||||
command:
|
||||
- valkey-cli
|
||||
- ping
|
||||
initialDelaySeconds: 5
|
||||
periodSeconds: 5
|
||||
timeoutSeconds: 3
|
||||
failureThreshold: 3
|
||||
volumeMounts:
|
||||
- name: redis-data
|
||||
mountPath: /data
|
||||
resources:
|
||||
requests:
|
||||
memory: "64Mi"
|
||||
cpu: "50m"
|
||||
limits:
|
||||
memory: "256Mi"
|
||||
cpu: "200m"
|
||||
volumes:
|
||||
- name: redis-data
|
||||
emptyDir: {}
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: remnawave-redis
|
||||
spec:
|
||||
selector:
|
||||
app: remnawave-redis
|
||||
ports:
|
||||
- name: redis
|
||||
protocol: TCP
|
||||
port: 6379
|
||||
targetPort: 6379
|
||||
@@ -1,21 +0,0 @@
|
||||
---
|
||||
apiVersion: monitoring.coreos.com/v1
|
||||
kind: ServiceMonitor
|
||||
metadata:
|
||||
name: remnawave-metrics
|
||||
labels:
|
||||
app: remnawave
|
||||
release: prometheus
|
||||
spec:
|
||||
selector:
|
||||
matchLabels:
|
||||
app: remnawave
|
||||
endpoints:
|
||||
- port: metrics
|
||||
path: /metrics
|
||||
interval: 30s
|
||||
scrapeTimeout: 10s
|
||||
honorLabels: true
|
||||
namespaceSelector:
|
||||
matchNames:
|
||||
- remnawave
|
||||
@@ -1,27 +0,0 @@
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: ConfigMap
|
||||
metadata:
|
||||
name: remnawave-subscription-page-config
|
||||
labels:
|
||||
app: remnawave-subscription-page
|
||||
data:
|
||||
APP_PORT: "3010"
|
||||
REMNAWAVE_PANEL_URL: "https://rw.hexor.cy"
|
||||
META_TITLE: "RemnaWave Subscription"
|
||||
META_DESCRIPTION: "Your VPN subscription portal"
|
||||
META_KEYWORDS: "vpn,subscription,remnawave"
|
||||
META_AUTHOR: "RemnaWave"
|
||||
ENABLE_ANALYTICS: "false"
|
||||
ANALYTICS_MEASUREMENT_ID: ""
|
||||
CUSTOM_SUB_PREFIX: ""
|
||||
THEME: "dark"
|
||||
CUSTOM_LOGO_URL: ""
|
||||
SHOW_SUBSCRIPTION_INFO: "true"
|
||||
SHOW_CONNECTION_INFO: "true"
|
||||
SHOW_QR_CODE: "true"
|
||||
QR_CODE_SIZE: "256"
|
||||
REFRESH_INTERVAL: "30000"
|
||||
SUBSCRIPTION_TEXT_COLOR: "#ffffff"
|
||||
BACKGROUND_COLOR: "#1a1a1a"
|
||||
ACCENT_COLOR: "#007bff"
|
||||
@@ -1,52 +0,0 @@
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: remnawave-subscription-page
|
||||
labels:
|
||||
app: remnawave-subscription-page
|
||||
spec:
|
||||
selector:
|
||||
matchLabels:
|
||||
app: remnawave-subscription-page
|
||||
replicas: 1
|
||||
strategy:
|
||||
type: RollingUpdate
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: remnawave-subscription-page
|
||||
spec:
|
||||
containers:
|
||||
- name: subscription-page
|
||||
image: 'remnawave/subscription-page:latest'
|
||||
imagePullPolicy: Always
|
||||
envFrom:
|
||||
- configMapRef:
|
||||
name: remnawave-subscription-page-config
|
||||
ports:
|
||||
- name: http
|
||||
containerPort: 3010
|
||||
protocol: TCP
|
||||
resources:
|
||||
requests:
|
||||
memory: "64Mi"
|
||||
cpu: "50m"
|
||||
limits:
|
||||
memory: "256Mi"
|
||||
cpu: "200m"
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: Service
|
||||
metadata:
|
||||
name: remnawave-subscription-page
|
||||
labels:
|
||||
app: remnawave-subscription-page
|
||||
spec:
|
||||
selector:
|
||||
app: remnawave-subscription-page
|
||||
ports:
|
||||
- name: http
|
||||
protocol: TCP
|
||||
port: 3010
|
||||
targetPort: 3010
|
||||
@@ -1,37 +0,0 @@
|
||||
---
|
||||
apiVersion: networking.k8s.io/v1
|
||||
kind: Ingress
|
||||
metadata:
|
||||
name: user-ui
|
||||
annotations:
|
||||
ingressClassName: traefik
|
||||
cert-manager.io/cluster-issuer: letsencrypt
|
||||
traefik.ingress.kubernetes.io/router.middlewares: kube-system-https-redirect@kubernetescrd
|
||||
acme.cert-manager.io/http01-edit-in-place: "true"
|
||||
spec:
|
||||
rules:
|
||||
- host: sub.hexor.cy
|
||||
http:
|
||||
paths:
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
backend:
|
||||
service:
|
||||
name: remnawave-subscription-page
|
||||
port:
|
||||
number: 3010
|
||||
- host: sub.hexor.ru
|
||||
http:
|
||||
paths:
|
||||
- path: /
|
||||
pathType: Prefix
|
||||
backend:
|
||||
service:
|
||||
name: remnawave-subscription-page
|
||||
port:
|
||||
number: 3010
|
||||
tls:
|
||||
- secretName: remnawave-user-ui-tls
|
||||
hosts:
|
||||
- sub.hexor.cy
|
||||
- sub.hexor.ru
|
||||
@@ -27,7 +27,7 @@ spec:
|
||||
cpu: "100m"
|
||||
limits:
|
||||
memory: "512Mi"
|
||||
cpu: "750m"
|
||||
cpu: "500m"
|
||||
command: ["hbbs"]
|
||||
args:
|
||||
- "--relay-servers"
|
||||
@@ -98,7 +98,7 @@ spec:
|
||||
cpu: "100m"
|
||||
limits:
|
||||
memory: "512Mi"
|
||||
cpu: "750m"
|
||||
cpu: "500m"
|
||||
command: ["hbbr"]
|
||||
args:
|
||||
- "--port"
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
apiVersion: external-secrets.io/v1
|
||||
apiVersion: external-secrets.io/v1beta1
|
||||
kind: ExternalSecret
|
||||
metadata:
|
||||
name: rustdesk-keys
|
||||
|
||||
@@ -4,7 +4,7 @@ resources:
|
||||
cpu: "100m"
|
||||
limits:
|
||||
memory: "1Gi"
|
||||
cpu: "750m"
|
||||
cpu: "500m"
|
||||
nodeSelector:
|
||||
kubernetes.io/hostname: master.tail2fe2d.ts.net
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ resources:
|
||||
cpu: "100m"
|
||||
limits:
|
||||
memory: "1Gi"
|
||||
cpu: "750m"
|
||||
cpu: "500m"
|
||||
nodeSelector:
|
||||
kubernetes.io/hostname: master.tail2fe2d.ts.net
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ resources:
|
||||
cpu: "200m"
|
||||
limits:
|
||||
memory: "2Gi"
|
||||
cpu: "1500m"
|
||||
cpu: "1000m"
|
||||
nodeSelector:
|
||||
kubernetes.io/hostname: master.tail2fe2d.ts.net
|
||||
|
||||
|
||||
@@ -33,7 +33,7 @@ resources:
|
||||
cpu: "200m"
|
||||
limits:
|
||||
memory: "2Gi"
|
||||
cpu: "1500m"
|
||||
cpu: "1000m"
|
||||
|
||||
probes:
|
||||
liveness:
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
---
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
@@ -22,7 +23,7 @@ spec:
|
||||
kubernetes.io/hostname: home.homenet
|
||||
containers:
|
||||
- name: desubot
|
||||
image: "ultradesu/desubot:latest"
|
||||
image: 'ultradesu/desubot:latest'
|
||||
imagePullPolicy: Always
|
||||
envFrom:
|
||||
- secretRef:
|
||||
@@ -31,11 +32,11 @@ spec:
|
||||
- name: RUST_LOG
|
||||
value: "info"
|
||||
volumeMounts:
|
||||
- mountPath: /storage
|
||||
name: storage
|
||||
- mountPath: /storage
|
||||
name: storage
|
||||
volumes:
|
||||
- name: storage
|
||||
persistentVolumeClaim:
|
||||
claimName: desubot-storage
|
||||
readOnly: false
|
||||
|
||||
nfs:
|
||||
server: nas.homenet
|
||||
path: /mnt/storage/Storage/k8s/desubot/
|
||||
readOnly: false
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
apiVersion: external-secrets.io/v1
|
||||
apiVersion: external-secrets.io/v1beta1
|
||||
kind: ExternalSecret
|
||||
metadata:
|
||||
name: get-id-bot
|
||||
@@ -24,7 +24,7 @@ spec:
|
||||
property: fields[0].value
|
||||
|
||||
---
|
||||
apiVersion: external-secrets.io/v1
|
||||
apiVersion: external-secrets.io/v1beta1
|
||||
kind: ExternalSecret
|
||||
metadata:
|
||||
name: desubot
|
||||
|
||||
@@ -30,7 +30,7 @@ spec:
|
||||
name: get-id-bot
|
||||
env:
|
||||
- name: RUST_LOG
|
||||
value: "info,teloxide::error_handlers=off"
|
||||
value: "info"
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -7,5 +7,3 @@ resources:
|
||||
- get-id-bot.yaml
|
||||
- external-secrets.yaml
|
||||
- desubot.yaml
|
||||
- restart-job.yaml
|
||||
- storage.yaml
|
||||
|
||||
@@ -1,56 +0,0 @@
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: ServiceAccount
|
||||
metadata:
|
||||
name: tg-bots-restart-sa
|
||||
|
||||
---
|
||||
apiVersion: rbac.authorization.k8s.io/v1
|
||||
kind: Role
|
||||
metadata:
|
||||
name: tg-bots-restart-role
|
||||
rules:
|
||||
- apiGroups: ["apps"]
|
||||
resources: ["deployments"]
|
||||
verbs: ["get", "patch"]
|
||||
|
||||
---
|
||||
apiVersion: rbac.authorization.k8s.io/v1
|
||||
kind: RoleBinding
|
||||
metadata:
|
||||
name: tg-bots-restart-rb
|
||||
subjects:
|
||||
- kind: ServiceAccount
|
||||
name: tg-bots-restart-sa
|
||||
roleRef:
|
||||
apiGroup: rbac.authorization.k8s.io
|
||||
kind: Role
|
||||
name: tg-bots-restart-role
|
||||
|
||||
---
|
||||
apiVersion: batch/v1
|
||||
kind: CronJob
|
||||
metadata:
|
||||
name: tg-bots-daily-restart
|
||||
spec:
|
||||
schedule: "0 4 * * *" # every day at 04:00
|
||||
jobTemplate:
|
||||
spec:
|
||||
template:
|
||||
spec:
|
||||
serviceAccountName: tg-bots-restart-sa
|
||||
restartPolicy: OnFailure
|
||||
containers:
|
||||
- name: kubectl
|
||||
image: bitnami/kubectl:latest
|
||||
env:
|
||||
- name: POD_NAMESPACE
|
||||
valueFrom:
|
||||
fieldRef:
|
||||
fieldPath: metadata.namespace
|
||||
command:
|
||||
- /bin/sh
|
||||
- -c
|
||||
- |
|
||||
kubectl -n "$POD_NAMESPACE" rollout restart deployment/desubot
|
||||
kubectl -n "$POD_NAMESPACE" rollout restart deployment/get-id-bot
|
||||
@@ -1,13 +0,0 @@
|
||||
|
||||
---
|
||||
apiVersion: v1
|
||||
kind: PersistentVolumeClaim
|
||||
metadata:
|
||||
name: desubot-storage
|
||||
spec:
|
||||
accessModes:
|
||||
- ReadWriteMany
|
||||
storageClassName: nfs-csi
|
||||
resources:
|
||||
requests:
|
||||
storage: 200Gi
|
||||
@@ -37,7 +37,7 @@ spec:
|
||||
cpu: "100m"
|
||||
limits:
|
||||
memory: "1Gi"
|
||||
cpu: "750m"
|
||||
cpu: "500m"
|
||||
env:
|
||||
- name: DOMAIN
|
||||
value: https://vw.hexor.cy
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
apiVersion: external-secrets.io/v1
|
||||
apiVersion: external-secrets.io/v1beta1
|
||||
kind: ExternalSecret
|
||||
metadata:
|
||||
name: admin-token
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
---
|
||||
apiVersion: external-secrets.io/v1
|
||||
apiVersion: external-secrets.io/v1beta1
|
||||
kind: ExternalSecret
|
||||
metadata:
|
||||
name: outfleet-secrets
|
||||
@@ -51,7 +51,7 @@ spec:
|
||||
property: fields[1].value
|
||||
|
||||
---
|
||||
apiVersion: external-secrets.io/v1
|
||||
apiVersion: external-secrets.io/v1beta1
|
||||
kind: ExternalSecret
|
||||
metadata:
|
||||
name: outline-config
|
||||
|
||||
@@ -174,7 +174,7 @@ spec:
|
||||
resources:
|
||||
limits:
|
||||
memory: "512Mi"
|
||||
cpu: "750m"
|
||||
cpu: "500m"
|
||||
requests:
|
||||
memory: "256Mi"
|
||||
cpu: "250m"
|
||||
|
||||
@@ -1,21 +0,0 @@
apiVersion: argoproj.io/v1alpha1
kind: Application
metadata:
name: xandikos
namespace: argocd
spec:
project: apps
destination:
namespace: xandikos
server: https://kubernetes.default.svc
source:
repoURL: ssh://git@gt.hexor.cy:30022/ab/homelab.git
targetRevision: HEAD
path: k8s/apps/xandikos
syncPolicy:
automated:
selfHeal: true
prune: true
syncOptions:
- CreateNamespace=true

@@ -1,70 +0,0 @@
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: xandikos
labels:
app: xandikos
spec:
selector:
matchLabels:
app: xandikos
replicas: 1
strategy:
type: RollingUpdate
rollingUpdate:
maxSurge: 1
maxUnavailable: 0
template:
metadata:
labels:
app: xandikos
spec:
nodeSelector:
kubernetes.io/hostname: master.tail2fe2d.ts.net
volumes:
- name: storage
hostPath:
path: /k8s/xandikos
type: Directory
containers:
- name: xandikos
image: ghcr.io/jelmer/xandikos:latest
imagePullPolicy: Always
command:
- "python3"
- "-m"
- "xandikos.web"
- "--port=8081"
- "-d/data"
- "--defaults"
- "--listen-address=0.0.0.0"
- "--route-prefix=/dav"
resources:
requests:
memory: "64Mi"
cpu: "100m"
limits:
memory: "512Mi"
cpu: "1000m"
livenessProbe:
httpGet:
path: /
port: 8081
initialDelaySeconds: 30
periodSeconds: 10
timeoutSeconds: 5
readinessProbe:
httpGet:
path: /
port: 8081
initialDelaySeconds: 10
periodSeconds: 5
timeoutSeconds: 3
ports:
- name: http
containerPort: 8081
protocol: TCP
volumeMounts:
- name: storage
mountPath: /data
@@ -1,31 +0,0 @@
---
apiVersion: external-secrets.io/v1
kind: ExternalSecret
metadata:
name: mmdl-secrets
spec:
target:
name: mmdl-secrets
deletionPolicy: Delete
template:
type: Opaque
data:
DB_DIALECT: 'postgres'
DB_HOST: psql.psql.svc
DB_USER: mmdl
DB_NAME: mmdl
DB_PORT: "5432"
DB_PASS: |-
{{ .pg_pass }}
AES_PASSWORD: |-
{{ .pg_pass }}

data:
- secretKey: pg_pass
sourceRef:
storeRef:
name: vaultwarden-login
kind: ClusterSecretStore
remoteRef:
key: 2a9deb39-ef22-433e-a1be-df1555625e22
property: fields[12].value
@@ -1,47 +0,0 @@
---
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: xandikos
annotations:
ingressClassName: traefik
cert-manager.io/cluster-issuer: letsencrypt
traefik.ingress.kubernetes.io/router.middlewares: kube-system-https-redirect@kubernetescrd
acme.cert-manager.io/http01-edit-in-place: "true"
spec:
rules:
- host: cal.hexor.cy
http:
paths:
- path: /
pathType: Prefix
backend:
service:
name: mmdl
port:
number: 3000
- path: /dav
pathType: Prefix
backend:
service:
name: xandikos
port:
number: 8081
- path: /.well-known/carddav
pathType: Exact
backend:
service:
name: xandikos
port:
number: 8081
- path: /.well-known/caldav
pathType: Exact
backend:
service:
name: xandikos
port:
number: 8081
tls:
- secretName: xandikos-tls
hosts:
- cal.hexor.cy
@@ -1,11 +0,0 @@
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization

resources:
- deployment.yaml
- service.yaml
- mmdl-deployment.yaml
- mmdl-service.yaml
- ingress.yaml
- external-secrets.yaml

@@ -1,61 +0,0 @@
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: mmdl
labels:
app: mmdl
spec:
selector:
matchLabels:
app: mmdl
replicas: 1
strategy:
type: RollingUpdate
rollingUpdate:
maxSurge: 1
maxUnavailable: 0
template:
metadata:
labels:
app: mmdl
spec:
nodeSelector:
kubernetes.io/hostname: master.tail2fe2d.ts.net
containers:
- name: mmdl
image: intriin/mmdl:latest
imagePullPolicy: Always
envFrom:
- secretRef:
name: mmdl-secrets
env:
- name: NEXTAUTH_URL
value: "https://cal.hexor.cy"
- name: CALDAV_SERVER_URL
value: "https://cal.hexor.cy/dav"
resources:
requests:
memory: "128Mi"
cpu: "100m"
limits:
memory: "512Mi"
cpu: "1000m"
livenessProbe:
httpGet:
path: /
port: 3000
initialDelaySeconds: 30
periodSeconds: 10
timeoutSeconds: 5
readinessProbe:
httpGet:
path: /
port: 3000
initialDelaySeconds: 10
periodSeconds: 5
timeoutSeconds: 3
ports:
- name: http
containerPort: 3000
protocol: TCP
@@ -1,14 +0,0 @@
---
apiVersion: v1
kind: Service
metadata:
name: mmdl
spec:
selector:
app: mmdl
type: ClusterIP
ports:
- name: http
port: 3000
protocol: TCP
targetPort: 3000
@@ -1,16 +0,0 @@
---
apiVersion: v1
kind: Service
metadata:
name: xandikos
labels:
app: xandikos
spec:
selector:
app: xandikos
ports:
- protocol: TCP
port: 8081
targetPort: 8081
name: http
type: ClusterIP
@@ -47,20 +47,3 @@ spec:
server: https://kubernetes.default.svc
sourceRepos:
- ssh://git@gt.hexor.cy:30022/ab/homelab.git

---
apiVersion: argoproj.io/v1alpha1
kind: AppProject
metadata:
name: desktop
namespace: argocd
spec:
clusterResourceWhitelist:
- group: '*'
kind: '*'
description: Hexor Home Lab Desktop Apps
destinations:
- namespace: '*'
server: https://kubernetes.default.svc
sourceRepos:
- ssh://git@gt.hexor.cy:30022/ab/homelab.git

@@ -1,5 +1,5 @@
---
apiVersion: external-secrets.io/v1
apiVersion: external-secrets.io/v1beta1
kind: ExternalSecret
metadata:
name: oidc-creds

@@ -10,7 +10,7 @@ resources:
helmCharts:
- name: argo-cd
repo: https://argoproj.github.io/argo-helm
version: 9.1.4
version: 8.1.3
releaseName: argocd
namespace: argocd
valuesFile: values.yaml

@@ -2,7 +2,7 @@

global:
domain: ag.hexor.cy
nodeSelector: &nodeSelector
nodeSelector:
kubernetes.io/hostname: master.tail2fe2d.ts.net
logging:
format: text
@@ -55,15 +55,15 @@ configs:

controller:
replicas: 1
nodeSelector:
<<: *nodeSelector
nodeSelector:
kubernetes.io/hostname: master.tail2fe2d.ts.net
# Add resources (requests/limits), PDB etc. if needed

# Dex OIDC provider
dex:
replicas: 1
nodeSelector:
<<: *nodeSelector
kubernetes.io/hostname: master.tail2fe2d.ts.net
enabled: false

# Standard Redis disabled because Redis HA is enabled
@@ -86,7 +86,7 @@ redis-ha:
server:
replicas: 1
nodeSelector:
<<: *nodeSelector
kubernetes.io/hostname: master.tail2fe2d.ts.net
ingress:
enabled: false

@@ -99,11 +99,8 @@ server:

# Repository Server
repoServer:
replicas: 1
livenessProbe:
timeoutSeconds: 10
periodSeconds: 60
nodeSelector:
<<: *nodeSelector
kubernetes.io/hostname: master.tail2fe2d.ts.net
# Add resources (requests/limits), PDB etc. if needed

# ApplicationSet Controller
@@ -111,7 +108,7 @@ applicationSet:
enabled: true # Enabled by default
replicas: 1
nodeSelector:
<<: *nodeSelector
kubernetes.io/hostname: master.tail2fe2d.ts.net
# Add resources (requests/limits), PDB etc. if needed

# Notifications Controller
@@ -119,5 +116,5 @@ notifications:
enabled: true # Enabled by default
replicas: 1
nodeSelector:
<<: *nodeSelector
kubernetes.io/hostname: master.tail2fe2d.ts.net
# Add notifiers, triggers, templates configurations if needed

@@ -1,5 +1,5 @@
---
apiVersion: external-secrets.io/v1
apiVersion: external-secrets.io/v1beta1
kind: ExternalSecret
metadata:
name: authentik-creds

@@ -47,7 +47,6 @@ server:
- minecraft.hexor.cy # Minecraft UI and server
- pass.hexor.cy # k8s-secret for openai
- ps.hexor.cy # pasarguard UI
# - rw.hexor.cy # RemnaWave UI
tls:
- secretName: idm-tls
hosts:

@@ -1,5 +1,5 @@
---
apiVersion: external-secrets.io/v1
apiVersion: external-secrets.io/v1beta1
kind: ExternalSecret
metadata:
name: cloudflare-creds
@@ -22,7 +22,7 @@ spec:
key: 8ae1dcb1-1182-48a1-8733-ca1144ea754b
property: fields[0].value
---
apiVersion: external-secrets.io/v1
apiVersion: external-secrets.io/v1beta1
kind: ExternalSecret
metadata:
name: aws-creds

@@ -10,7 +10,7 @@ resources:
helmCharts:
- name: cert-manager
repo: https://charts.jetstack.io
version: 1.19.1
version: 1.17.1
releaseName: cert-manager
namespace: cert-manager
valuesFile: values.yaml

@@ -1,6 +1,2 @@
crds:
enabled: true
prometheus:
enabled: true
servicemonitor:
enabled: true
@@ -18,4 +18,4 @@ spec:
prune: true
syncOptions:
- CreateNamespace=true
- ServerSideApply=true

@@ -42,10 +42,10 @@
resources:
requests:
memory: "128Mi"
cpu: "300m"
cpu: "100m"
limits:
memory: "512Mi"
cpu: "800m"
cpu: "500m"
env:
- name: BW_HOST
valueFrom:
@@ -128,7 +128,7 @@ spec:
app.kubernetes.io/name: external-secrets

---
apiVersion: external-secrets.io/v1
apiVersion: external-secrets.io/v1beta1
kind: ClusterSecretStore
metadata:
name: vaultwarden-login
@@ -141,7 +141,7 @@ spec:
result:
jsonPath: "$.data.{{ .remoteRef.property }}"
---
apiVersion: external-secrets.io/v1
apiVersion: external-secrets.io/v1beta1
kind: ClusterSecretStore
metadata:
name: vaultwarden-fields

@@ -2,12 +2,13 @@ apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization

resources:
- app.yaml
- bitwarden-store.yaml

helmCharts:
- name: external-secrets
repo: https://charts.external-secrets.io
version: 1.1.0
version: 0.16.2
releaseName: external-secrets
namespace: external-secrets
valuesFile: values.yaml

@@ -3,6 +3,5 @@ kind: Kustomization

resources:
- app.yaml
- nfs-storage.yaml
- coredns-internal-resolve.yaml

@@ -1,13 +0,0 @@
---
apiVersion: storage.k8s.io/v1
kind: StorageClass
metadata:
name: nfs-csi
provisioner: nfs.csi.k8s.io
parameters:
server: 10.0.5.2
share: /mnt/storage/Storage/PVC
reclaimPolicy: Retain
volumeBindingMode: Immediate
mountOptions:
- vers=4.1
@@ -1,5 +1,5 @@
---
apiVersion: external-secrets.io/v1
apiVersion: external-secrets.io/v1beta1
kind: ExternalSecret
metadata:
name: postgres-creds
@@ -90,7 +90,7 @@ spec:
key: 832042b9-7edb-4f4c-9254-3c8884ba9733
property: fields[2].value
---
apiVersion: external-secrets.io/v1
apiVersion: external-secrets.io/v1beta1
kind: ExternalSecret
metadata:
name: postgres-users
@@ -119,12 +119,6 @@ spec:
{{ .outfleet_rs }}
USER_pasarguard: |-
{{ .pasarguard }}
USER_remnawave: |-
{{ .remnawave }}
USER_umami: |-
{{ .umami }}
USER_mmdl: |-
{{ .mmdl }}
data:
- secretKey: authentik
sourceRef:
@@ -225,36 +219,3 @@ spec:
metadataPolicy: None
key: 2a9deb39-ef22-433e-a1be-df1555625e22
property: fields[9].value
- secretKey: remnawave
sourceRef:
storeRef:
name: vaultwarden-login
kind: ClusterSecretStore
remoteRef:
conversionStrategy: Default
decodingStrategy: None
metadataPolicy: None
key: 2a9deb39-ef22-433e-a1be-df1555625e22
property: fields[10].value
- secretKey: umami
sourceRef:
storeRef:
name: vaultwarden-login
kind: ClusterSecretStore
remoteRef:
conversionStrategy: Default
decodingStrategy: None
metadataPolicy: None
key: 2a9deb39-ef22-433e-a1be-df1555625e22
property: fields[11].value
- secretKey: mmdl
sourceRef:
storeRef:
name: vaultwarden-login
kind: ClusterSecretStore
remoteRef:
conversionStrategy: Default
decodingStrategy: None
metadataPolicy: None
key: 2a9deb39-ef22-433e-a1be-df1555625e22
property: fields[12].value

@@ -28,7 +28,7 @@ spec:
type: DirectoryOrCreate
containers:
- name: psql
image: 'bitnamilegacy/postgresql:17'
image: 'bitnami/postgresql:17'
env:
- name: POSTGRESQL_PASSWORD
valueFrom:
@@ -63,7 +63,7 @@ spec:
containerPort: 9187
protocol: TCP
- name: user-creation
image: 'bitnamilegacy/postgresql:17'
image: 'bitnami/postgresql:17'
command:
- /bin/bash
- -c

@@ -28,7 +28,7 @@ spec:
type: DirectoryOrCreate
containers:
- name: psql
image: 'bitnamilegacy/postgresql:17'
image: 'bitnami/postgresql:17'
env:
- name: POSTGRESQL_PASSWORD
valueFrom:
@@ -60,7 +60,7 @@ spec:
containerPort: 9187
protocol: TCP
- name: user-creation
image: 'bitnamilegacy/postgresql:17'
image: 'bitnami/postgresql:17'
command:
- /bin/bash
- -c

@@ -1,5 +1,5 @@
image:
tag: "latest"
tag: "9.9"
pullPolicy: Always
env:
email: "postgres@hexor.cy"

@@ -13,6 +13,9 @@ spec:
targetRevision: HEAD
path: k8s/core/prom-stack
syncPolicy:
automated:
selfHeal: true
prune: true
syncOptions:
- CreateNamespace=true
- ServerSideApply=true

@@ -1,5 +1,5 @@
---
apiVersion: external-secrets.io/v1
apiVersion: external-secrets.io/v1beta1
kind: ExternalSecret
metadata:
name: grafana-admin
@@ -79,83 +79,3 @@ spec:
key: 2a9deb39-ef22-433e-a1be-df1555625e22
property: fields[2].value

---
apiVersion: external-secrets.io/v1
kind: ExternalSecret
metadata:
name: alertmanager-telegram
spec:
target:
name: alertmanager-telegram-secret
deletionPolicy: Delete
template:
type: Opaque
data:
TELEGRAM_BOT_TOKEN: |-
{{ .bot_token }}
TELEGRAM_CHAT_ID: |-
{{ .chat_id }}
data:
- secretKey: bot_token
sourceRef:
storeRef:
name: vaultwarden-login
kind: ClusterSecretStore
remoteRef:
conversionStrategy: Default
decodingStrategy: None
metadataPolicy: None
key: eca0fb0b-3939-40a8-890a-6294863e5a65
property: fields[0].value
- secretKey: chat_id
sourceRef:
storeRef:
name: vaultwarden-login
kind: ClusterSecretStore
remoteRef:
conversionStrategy: Default
decodingStrategy: None
metadataPolicy: None
key: eca0fb0b-3939-40a8-890a-6294863e5a65
property: fields[1].value

---
apiVersion: external-secrets.io/v1
kind: ExternalSecret
metadata:
name: grafana-telegram
spec:
target:
name: grafana-telegram
deletionPolicy: Delete
template:
type: Opaque
data:
bot-token: |-
{{ .bot_token }}
chat-id: |-
{{ .chat_id }}
data:
- secretKey: bot_token
sourceRef:
storeRef:
name: vaultwarden-login
kind: ClusterSecretStore
remoteRef:
conversionStrategy: Default
decodingStrategy: None
metadataPolicy: None
key: eca0fb0b-3939-40a8-890a-6294863e5a65
property: fields[0].value
- secretKey: chat_id
sourceRef:
storeRef:
name: vaultwarden-login
kind: ClusterSecretStore
remoteRef:
conversionStrategy: Default
decodingStrategy: None
metadataPolicy: None
key: eca0fb0b-3939-40a8-890a-6294863e5a65
property: fields[1].value

@@ -1,152 +0,0 @@
apiVersion: v1
kind: ConfigMap
metadata:
name: grafana-alerting
namespace: prometheus
data:
rules.yaml: |
apiVersion: 1
groups:
- orgId: 1
name: pasarguard_alerts
folder: Kubernetes
interval: 1m
rules:
- uid: pasarguard_cpu_throttling
title: VPN CPU Throttle
condition: B
data:
- refId: A
relativeTimeRange:
from: 600
to: 0
datasourceUid: P76F38748CEC837F0
model:
expr: 'rate(container_cpu_cfs_throttled_periods_total{container="pasarguard-node"}[5m])'
refId: A
intervalMs: 1000
maxDataPoints: 43200
- refId: B
relativeTimeRange:
from: 600
to: 0
datasourceUid: __expr__
model:
conditions:
- evaluator:
params:
- 0.1
type: gt
operator:
type: and
query:
params: []
datasource:
type: __expr__
uid: __expr__
expression: A
reducer: last
refId: B
type: reduce
noDataState: NoData
execErrState: Alerting
for: 5m
annotations:
pod: '{{ $labels.pod }}'
node: '{{ $labels.node }}'
namespace: '{{ $labels.namespace }}'
throttle_rate: '{{ printf "%.2f" $values.A }}'
summary: 'VPN node throttling CPU'
labels:
severity: warning

- orgId: 1
name: kubernetes_alerts
folder: Kubernetes
interval: 30s
rules:
- uid: node_not_ready
title: Kubernetes Node Not Ready
condition: B
data:
- refId: A
relativeTimeRange:
from: 300
to: 0
datasourceUid: P76F38748CEC837F0
model:
expr: 'kube_node_status_condition{condition="Ready",status="true"} == 0'
refId: A
intervalMs: 1000
maxDataPoints: 43200
- refId: B
relativeTimeRange:
from: 300
to: 0
datasourceUid: __expr__
model:
conditions:
- evaluator:
params:
- 0
type: gt
operator:
type: and
query:
params: []
datasource:
type: __expr__
uid: __expr__
expression: A
reducer: last
refId: B
type: reduce
noDataState: Alerting
execErrState: Alerting
for: 0s
annotations:
node: '{{ $labels.node }}'
condition: '{{ $labels.condition }}'
summary: 'Kubernetes node is not ready'
labels:
severity: critical

contactpoints.yaml: |
apiVersion: 1
contactPoints:
- orgId: 1
name: telegram
receivers:
- uid: telegram_default
type: telegram
disableResolveMessage: false
settings:
bottoken: $TELEGRAM_BOT_TOKEN
chatid: "124317807"
message: |
{{ if eq .Status "firing" }}🔥 FIRING{{ else }}✅ RESOLVED{{ end }}

{{ range .Alerts }}
📊 <b>{{ .Labels.alertname }}</b>
{{ .Annotations.summary }}

{{ if .Annotations.node }}🖥 <b>Node:</b> <code>{{ .Annotations.node }}</code>{{ end }}
{{ if .Annotations.pod }}📦 <b>Pod:</b> <code>{{ .Annotations.pod }}</code>{{ end }}
{{ if .Annotations.namespace }}📁 <b>Namespace:</b> <code>{{ .Annotations.namespace }}</code>{{ end }}
{{ if .Annotations.throttle_rate }}⚠️ <b>Throttling rate:</b> {{ .Annotations.throttle_rate }}{{ end }}

🔗 <a href="{{ .GeneratorURL }}">View in Grafana</a>
{{ end }}
parse_mode: HTML

policies.yaml: |
apiVersion: 1
policies:
- orgId: 1
receiver: telegram
group_by:
- grafana_folder
- alertname
group_wait: 10s
group_interval: 5m
repeat_interval: 4h
@@ -38,10 +38,6 @@ datasources:
url: http://prometheus-kube-prometheus-prometheus.prometheus.svc:9090
access: proxy
isDefault: true
- name: Loki
type: loki
url: http://loki-gateway.prometheus.svc:80
access: proxy

ingress:
enabled: true
@@ -56,30 +52,3 @@ ingress:
hosts:
- '*.hexor.cy'

extraConfigmapMounts:
- name: grafana-alerting-rules
mountPath: /etc/grafana/provisioning/alerting/rules.yaml
configMap: grafana-alerting
subPath: rules.yaml
readOnly: true
- name: grafana-alerting-contactpoints
mountPath: /etc/grafana/provisioning/alerting/contactpoints.yaml
configMap: grafana-alerting
subPath: contactpoints.yaml
readOnly: true
- name: grafana-alerting-policies
mountPath: /etc/grafana/provisioning/alerting/policies.yaml
configMap: grafana-alerting
subPath: policies.yaml
readOnly: true

envValueFrom:
TELEGRAM_BOT_TOKEN:
secretKeyRef:
name: grafana-telegram
key: bot-token
TELEGRAM_CHAT_ID:
secretKeyRef:
name: grafana-telegram
key: chat-id

@@ -2,14 +2,14 @@ apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization

resources:
- app.yaml
- persistentVolume.yaml
- external-secrets.yaml
- grafana-alerting-configmap.yaml

helmCharts:
- name: kube-prometheus-stack
repo: https://prometheus-community.github.io/helm-charts
version: 79.7.1
version: 70.4.2
releaseName: prometheus
namespace: prometheus
valuesFile: prom-values.yaml
@@ -17,24 +17,9 @@ helmCharts:

- name: grafana
repo: https://grafana.github.io/helm-charts
version: 10.2.0
version: 8.11.4
releaseName: grafana
namespace: prometheus
valuesFile: grafana-values.yaml
includeCRDs: true

- name: loki
repo: https://grafana.github.io/helm-charts
version: 6.29.0
releaseName: loki
namespace: prometheus
valuesFile: loki-values.yaml
includeCRDs: true

- name: promtail
repo: https://grafana.github.io/helm-charts
version: 6.16.6
releaseName: promtail
namespace: prometheus
valuesFile: promtail-values.yaml

@@ -1,75 +0,0 @@
# Loki SingleBinary mode - optimal for homelab
deploymentMode: SingleBinary

loki:
auth_enabled: false
commonConfig:
replication_factor: 1
path_prefix: /var/loki
schemaConfig:
configs:
- from: 2024-01-01
store: tsdb
object_store: filesystem
schema: v13
index:
prefix: index_
period: 24h
storage:
type: filesystem
filesystem:
chunks_directory: /var/loki/chunks
rules_directory: /var/loki/rules
limits_config:
reject_old_samples: false
ingestion_rate_mb: 16
ingestion_burst_size_mb: 32
max_query_parallelism: 32
volume_enabled: true

singleBinary:
replicas: 1
nodeSelector:
kubernetes.io/hostname: master.tail2fe2d.ts.net
persistence:
enabled: true
size: 50Gi
storageClass: ""

# Disable distributed mode components
read:
replicas: 0
write:
replicas: 0
backend:
replicas: 0

# Disable memcached (not needed for SingleBinary)
chunksCache:
enabled: false
resultsCache:
enabled: false

# Gateway for Loki access
gateway:
enabled: true
replicas: 1
service:
type: ClusterIP

# Disable tests and canary
test:
enabled: false
lokiCanary:
enabled: false

# Monitoring
monitoring:
dashboards:
enabled: false
rules:
enabled: false
serviceMonitor:
enabled: false
selfMonitoring:
enabled: false
@@ -1,35 +1,5 @@
grafana:
enabled: false

alertmanager:
config:
global:
telegram_api_url: "https://api.telegram.org"
route:
group_by: ['alertname', 'cluster', 'service']
group_wait: 10s
group_interval: 10s
repeat_interval: 12h
receiver: 'telegram'
receivers:
- name: 'telegram'
telegram_configs:
- bot_token: '${TELEGRAM_BOT_TOKEN}'
chat_id: ${TELEGRAM_CHAT_ID}
parse_mode: 'HTML'
message: |
{{ range .Alerts }}
<b>{{ .Labels.alertname }}</b>
{{ if .Labels.severity }}<b>Severity:</b> {{ .Labels.severity }}{{ end }}
<b>Status:</b> {{ .Status }}
{{ if .Annotations.summary }}<b>Summary:</b> {{ .Annotations.summary }}{{ end }}
{{ if .Annotations.description }}<b>Description:</b> {{ .Annotations.description }}{{ end }}
{{ end }}

alertmanagerSpec:
secrets:
- alertmanager-telegram-secret

prometheus:
prometheusSpec:
enableRemoteWriteReceiver: true

@@ -1,37 +0,0 @@
# Promtail - log collection agent for all cluster pods
config:
clients:
- url: http://loki-gateway.prometheus.svc:80/loki/api/v1/push

# DaemonSet - runs on every node
daemonset:
enabled: true

# Tolerations for master/control-plane nodes
tolerations:
- key: node-role.kubernetes.io/master
operator: Exists
effect: NoSchedule
- key: node-role.kubernetes.io/control-plane
operator: Exists
effect: NoSchedule

# Init container to increase inotify limits
initContainer:
- name: init-inotify
image: docker.io/busybox:1.36
imagePullPolicy: IfNotPresent
command:
- sh
- -c
- sysctl -w fs.inotify.max_user_instances=512
securityContext:
privileged: true

resources:
requests:
cpu: 50m
memory: 64Mi
limits:
cpu: 200m
memory: 128Mi
@@ -16,7 +16,7 @@ spec:
serviceAccountName: system-upgrade
upgrade:
image: rancher/k3s-upgrade
version: v1.34.3+k3s1
version: v1.34.1+k3s1
---
# Agent plan
apiVersion: upgrade.cattle.io/v1
@@ -39,5 +39,5 @@ spec:
serviceAccountName: system-upgrade
upgrade:
image: rancher/k3s-upgrade
version: v1.34.3+k3s1
version: v1.34.1+k3s1

@@ -1,21 +0,0 @@
apiVersion: argoproj.io/v1alpha1
kind: Application
metadata:
name: jellyfin-uk
namespace: argocd
spec:
project: apps
destination:
namespace: jellyfin-uk
server: https://kubernetes.default.svc
source:
repoURL: ssh://git@gt.hexor.cy:30022/ab/homelab.git
targetRevision: HEAD
path: k8s/desktop/jellyfin
syncPolicy:
automated:
selfHeal: true
prune: true
syncOptions:
- CreateNamespace=true

@@ -1,16 +0,0 @@
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization

resources:
- app.yaml
- qbittorent.yaml

helmCharts:
- name: jellyfin
repo: https://utkuozdemir.org/helm-charts
version: 2.0.0
releaseName: jellyfin
namespace: jellyfin
valuesFile: values.yaml
includeCRDs: true

@@ -1,123 +0,0 @@
---
apiVersion: external-secrets.io/v1
kind: ExternalSecret
metadata:
name: vpn-creds
spec:
target:
name: vpn-creds
deletionPolicy: Delete
template:
type: Opaque
data:
ss_link: |-
{{ .ss_link }}
data:
- secretKey: ss_link
sourceRef:
storeRef:
name: vaultwarden-login
kind: ClusterSecretStore
remoteRef:
key: cfee6f62-fb06-4a4c-b6d8-92da4908c65a
property: fields[0].value
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: qbittorrent
labels:
app: qbittorrent
annotations:
reloader.stakater.com/auto: "true"
spec:
selector:
matchLabels:
app: qbittorrent
replicas: 1
strategy:
type: RollingUpdate
rollingUpdate:
maxSurge: 1
maxUnavailable: 0
template:
metadata:
labels:
app: qbittorrent
spec:
nodeSelector:
kubernetes.io/hostname: uk-desktop.tail2fe2d.ts.net
tolerations:
- key: workload
operator: Equal
value: desktop
effect: NoSchedule
volumes:
- name: config
hostPath:
path: /k8s/qbt-config
type: DirectoryOrCreate
- name: media
hostPath:
path: /k8s/media/downloads
type: DirectoryOrCreate
containers:
- name: qbittorrent
image: 'linuxserver/qbittorrent:latest'
ports:
- name: http
containerPort: 8080
protocol: TCP
volumeMounts:
- name: config
mountPath: /config
- name: media
mountPath: /downloads
- name: shadowsocks-proxy
image: teddysun/shadowsocks-rust:latest
env:
- name: SS_LINK
valueFrom:
secretKeyRef:
name: vpn-creds
key: ss_link
command: ["/bin/bash", "-c", "rm /etc/shadowsocks-rust/config.json && sslocal --server-url $SS_LINK --local-addr 127.0.0.1:8081 -U --protocol http"]
resources:
requests:
memory: "64Mi"
cpu: "300m"
limits:
memory: "128Mi"
cpu: "300m"
---
apiVersion: v1
kind: Service
metadata:
name: qbittorrent
spec:
selector:
app: qbittorrent
ports:
- protocol: TCP
port: 80
targetPort: 8080

---
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: jf-local-ingress
annotations:
ingressClassName: traefik
spec:
rules:
- host: tr.uk
http:
paths:
- path: /
pathType: Prefix
backend:
service:
name: qbittorrent
port:
number: 80
@@ -1,41 +0,0 @@
image:
tag: 10.11.4
resources:
requests:
memory: "2Gi"
cpu: "1000m"
limits:
memory: "8Gi"
cpu: "6000m"
nodeSelector:
kubernetes.io/hostname: uk-desktop.tail2fe2d.ts.net
tolerations:
- key: workload
operator: Equal
value: desktop
effect: NoSchedule
persistence:
config:
enabled: true
isPvc: false
customVolume:
hostPath:
path: /k8s/jellyfin
type: DirectoryOrCreate
data:
enabled: true
isPvc: false
customVolume:
hostPath:
path: /k8s/media/downloads
type: DirectoryOrCreate

ingress:
enabled: true
className: traefik
hosts:
- host: jf.uk
paths:
- path: /
pathType: Prefix

@@ -1,18 +0,0 @@
apiVersion: argoproj.io/v1alpha1
kind: Application
metadata:
name: khm-client
namespace: argocd
spec:
project: desktop
destination:
namespace: khm
server: https://kubernetes.default.svc
source:
repoURL: ssh://git@gt.hexor.cy:30022/ab/homelab.git
targetRevision: HEAD
path: k8s/desktop/khm
syncPolicy:
automated:
selfHeal: true
prune: true
@@ -1,33 +0,0 @@
---
apiVersion: external-secrets.io/v1
kind: ExternalSecret
metadata:
name: khm-client-creds
spec:
target:
name: khm-client-creds
deletionPolicy: Delete
template:
type: Opaque
data:
USERNAME: |-
{{ .username }}
PASSWORD: |-
{{ .password }}
data:
- secretKey: username
sourceRef:
storeRef:
name: vaultwarden-login
kind: ClusterSecretStore
remoteRef:
key: 19c06480-0814-4d1f-aa80-710105989188
property: login.username
- secretKey: password
sourceRef:
storeRef:
name: vaultwarden-login
kind: ClusterSecretStore
remoteRef:
key: 19c06480-0814-4d1f-aa80-710105989188
property: login.password
Some files were not shown because too many files have changed in this diff.