Compare commits

..

1 Commit

Author SHA1 Message Date
Gitea Actions Bot
ae6ed229f3 Auto-update README with current k8s applications
All checks were successful
Terraform / Terraform (pull_request) Successful in 20s
Generated by CI/CD workflow on 2026-03-07 21:24:30

This PR updates the README.md file with the current list of applications found in the k8s/ directory structure.
2026-03-07 21:24:30 +00:00
7 changed files with 104 additions and 118 deletions

View File

@@ -4,7 +4,7 @@ metadata:
name: comfyui
namespace: argocd
spec:
project: apps
project: core
destination:
namespace: comfyui
server: https://kubernetes.default.svc

View File

@@ -16,30 +16,18 @@ spec:
app: comfyui
spec:
runtimeClassName: nvidia
tolerations:
- key: workload
operator: Equal
value: desktop
effect: NoSchedule
nodeSelector:
kubernetes.io/hostname: uk-desktop.tail2fe2d.ts.net
# Fix permissions mismatch usually happening when mapping host paths
securityContext:
runAsUser: 0
initContainers:
- name: create-data-dir
image: busybox
command: ["sh", "-c", "mkdir -p /host.data && chown -R 1000:1000 /host.data"]
volumeMounts:
- name: data
mountPath: /host.data
containers:
- name: comfyui
image: runpod/comfyui:latest-5090
image: ghcr.io/ai-dock/comfyui:latest-cuda
imagePullPolicy: IfNotPresent
env:
- name: COMFYUI_PORT
value: "8188"
- name: CLI_ARGS
value: "--listen 0.0.0.0 --port 8188"
ports:
- containerPort: 8188
name: http

View File

@@ -10,14 +10,3 @@ tolerations:
runtimeClassName: nvidia
setAsDefault: false
config:
default: any
map:
any: |-
version: v1
sharing:
timeSlicing:
resources:
- name: nvidia.com/gpu
replicas: 4

View File

@@ -260,7 +260,7 @@ data:
to: 0
datasourceUid: P76F38748CEC837F0
model:
expr: 'node_load15 / on(instance) group_left count by(instance)(node_cpu_seconds_total{mode="idle"})'
expr: 'node_load5 / on(instance) group_left count by(instance)(node_cpu_seconds_total{mode="idle"})'
refId: A
intervalMs: 1000
maxDataPoints: 43200
@@ -273,7 +273,7 @@ data:
conditions:
- evaluator:
params:
- 2
- 0.8
type: gt
operator:
type: and
@@ -283,16 +283,16 @@ data:
type: __expr__
uid: __expr__
expression: A
reducer: last
reducer: max
refId: B
type: reduce
noDataState: NoData
execErrState: Alerting
for: 15m
for: 5m
annotations:
node: '{{ $labels.instance }}'
load_average: '{{ printf "%.2f" $values.A }}'
summary: 'Node load average is critically high relative to CPU count'
summary: 'Node load average is high relative to CPU count'
labels:
severity: warning

View File

@@ -0,0 +1,85 @@
envFromSecret: grafana-admin
nodeSelector:
kubernetes.io/hostname: master.tail2fe2d.ts.net
admin:
existingSecret: grafana-admin
userKey: username
passwordKey: password
grafana.ini:
auth:
signout_redirect_url: https://idm.hexor.cy/application/o/grafana/end-session/
# oauth_auto_login: true
auth.generic_oauth:
name: authentik
enabled: true
scopes: "openid profile email"
auth_url: https://idm.hexor.cy/application/o/authorize/
token_url: https://idm.hexor.cy/application/o/token/
api_url: https://idm.hexor.cy/application/o/userinfo/
role_attribute_path: >-
contains(groups, 'Grafana Admin') && 'Admin' ||
contains(groups, 'Grafana Editors') && 'Editor' ||
contains(groups, 'Grafana Viewer') && 'Viewer'
database:
type: postgres
host: psql.psql.svc:5432
name: grafana
user: grafana
ssl_mode: disable
datasources:
datasources.yaml:
apiVersion: 1
datasources:
- name: Prometheus Local
type: prometheus
url: http://prometheus-kube-prometheus-prometheus.prometheus.svc:9090
access: proxy
isDefault: true
- name: Loki
type: loki
url: http://loki-gateway.prometheus.svc:80
access: proxy
ingress:
enabled: true
ingressClassName: traefik
annotations:
cert-manager.io/cluster-issuer: letsencrypt
traefik.ingress.kubernetes.io/router.middlewares: kube-system-https-redirect@kubernetescrd
hosts:
- gf.hexor.cy
tls:
- secretName: grafana-tls
hosts:
- '*.hexor.cy'
extraConfigmapMounts:
- name: grafana-alerting-rules
mountPath: /etc/grafana/provisioning/alerting/rules.yaml
configMap: grafana-alerting
subPath: rules.yaml
readOnly: true
- name: grafana-alerting-contactpoints
mountPath: /etc/grafana/provisioning/alerting/contactpoints.yaml
configMap: grafana-alerting
subPath: contactpoints.yaml
readOnly: true
- name: grafana-alerting-policies
mountPath: /etc/grafana/provisioning/alerting/policies.yaml
configMap: grafana-alerting
subPath: policies.yaml
readOnly: true
envValueFrom:
TELEGRAM_BOT_TOKEN:
secretKeyRef:
name: grafana-telegram
key: bot-token
TELEGRAM_CHAT_ID:
secretKeyRef:
name: grafana-telegram
key: chat-id

View File

@@ -16,6 +16,14 @@ helmCharts:
valuesFile: prom-values.yaml
includeCRDs: true
- name: grafana
repo: https://grafana.github.io/helm-charts
version: 10.2.0
releaseName: grafana
namespace: prometheus
valuesFile: grafana-values.yaml
includeCRDs: true
- name: loki
repo: https://grafana.github.io/helm-charts
version: 6.29.0

View File

@@ -1,4 +1,5 @@
grafana:
enabled: false
alertmanager:
config:
@@ -91,88 +92,3 @@ prometheus:
requests:
storage: 400Gi
grafana:
enabled: true
serviceAccount:
create: true
name: "prom-grafana-sa"
envFromSecret: grafana-admin
nodeSelector:
kubernetes.io/hostname: master.tail2fe2d.ts.net
admin:
existingSecret: grafana-admin
userKey: username
passwordKey: password
grafana.ini:
auth:
signout_redirect_url: https://idm.hexor.cy/application/o/grafana/end-session/
auth.generic_oauth:
name: authentik
enabled: true
scopes: "openid profile email"
auth_url: https://idm.hexor.cy/application/o/authorize/
token_url: https://idm.hexor.cy/application/o/token/
api_url: https://idm.hexor.cy/application/o/userinfo/
role_attribute_path: >-
contains(groups, 'Grafana Admin') && 'Admin' ||
contains(groups, 'Grafana Editors') && 'Editor' ||
contains(groups, 'Grafana Viewer') && 'Viewer'
database:
type: postgres
host: psql.psql.svc:5432
name: grafana
user: grafana
ssl_mode: disable
# The Loki datasource config needs to be preserved,
# but instead of "datasources.datasources.yaml", we define it like this for the prometheus-stack chart:
additionalDataSources:
- name: Loki
type: loki
url: http://loki-gateway.prometheus.svc:80
access: proxy
orgId: 1
ingress:
enabled: true
ingressClassName: traefik
annotations:
cert-manager.io/cluster-issuer: letsencrypt
traefik.ingress.kubernetes.io/router.middlewares: kube-system-https-redirect@kubernetescrd
hosts:
- gf.hexor.cy
tls:
- secretName: grafana-tls
hosts:
- '*.hexor.cy'
extraConfigmapMounts:
- name: grafana-alerting-rules
mountPath: /etc/grafana/provisioning/alerting/rules.yaml
configMap: grafana-alerting
subPath: rules.yaml
readOnly: true
- name: grafana-alerting-contactpoints
mountPath: /etc/grafana/provisioning/alerting/contactpoints.yaml
configMap: grafana-alerting
subPath: contactpoints.yaml
readOnly: true
- name: grafana-alerting-policies
mountPath: /etc/grafana/provisioning/alerting/policies.yaml
configMap: grafana-alerting
subPath: policies.yaml
readOnly: true
envValueFrom:
TELEGRAM_BOT_TOKEN:
secretKeyRef:
name: grafana-telegram
key: bot-token
TELEGRAM_CHAT_ID:
secretKeyRef:
name: grafana-telegram
key: chat-id