210 Commits

Author SHA1 Message Date
299f661f60 trying some steam containers 2024-09-30 15:18:19 +02:00
1a8f52cc58 update immich 2024-09-30 15:17:02 +02:00
4fb7234df8 switch to backblaze for backups 2024-09-30 15:15:24 +02:00
9f939b16bc update immich 2024-09-26 16:56:36 +02:00
173f7a319c Merge pull request 'Update Helm release immich to v0.7.2' (#181) from renovate/immich-0.x into main
Reviewed-on: #181
2024-09-24 10:32:51 +00:00
284dff3040 Merge pull request 'Update Helm release gitea to v10.4.1' (#189) from renovate/gitea-10.x into main
Reviewed-on: #189
2024-09-24 10:32:39 +00:00
b4529f52fe Merge pull request 'Update Helm release traefik to v31.1.1' (#193) from renovate/traefik-31.x into main
Reviewed-on: #193
2024-09-24 10:32:27 +00:00
6eac191db3 Merge pull request 'Update ghcr.io/advplyr/audiobookshelf Docker tag to v2.13.4' (#192) from renovate/ghcr.io-advplyr-audiobookshelf-2.x into main
Reviewed-on: #192
2024-09-24 10:32:10 +00:00
ed53eeef71 Update Helm release traefik to v31.1.1 2024-09-20 08:33:30 +00:00
b10aced1e1 update grafana sealedsecret 2024-09-19 18:59:12 +02:00
6fcd66ff71 Update ghcr.io/advplyr/audiobookshelf Docker tag to v2.13.4 2024-09-17 12:01:48 +00:00
60077df128 add audiobookshelf 2024-09-17 13:42:33 +02:00
dacb84ee59 allow prune to work with stale locks 2024-09-17 10:24:10 +02:00
40146b69d8 better immich postgres-vectors handling 2024-09-15 19:25:54 +02:00
1a3cd7febd reseal secrets 2024-09-13 15:08:51 +02:00
fad28554bb bump traefik crds 2024-09-13 11:49:13 +02:00
d921738728 Update Helm release gitea to v10.4.1 2024-09-11 13:31:20 +00:00
3bb863dd07 bump immich version 2024-09-09 17:49:39 +02:00
11ab97db50 Merge pull request 'Update jellyfin/jellyfin Docker tag to v10.9.11' (#187) from renovate/jellyfin-jellyfin-10.x into main
Reviewed-on: #187
2024-09-09 10:50:15 +00:00
59bc6540c6 Update jellyfin/jellyfin Docker tag to v10.9.11 2024-09-07 22:31:09 +00:00
fd6e5f50de Merge pull request 'Update Helm release cloudnative-pg to v0.22.0' (#178) from renovate/cloudnative-pg-0.x into main
Reviewed-on: #178
2024-09-07 11:07:05 +00:00
bc0a4186b3 Merge pull request 'Update Helm release traefik to v31' (#182) from renovate/traefik-31.x into main
Reviewed-on: #182
2024-09-05 18:42:17 +00:00
730f8b5121 Merge pull request 'Update actualbudget/actual-server Docker tag to v24.9.0' (#183) from renovate/actualbudget-actual-server-24.x into main
Reviewed-on: #183
2024-09-05 18:41:41 +00:00
86911f133f Merge pull request 'Update Helm release grafana to v8.5.1' (#184) from renovate/grafana-8.x into main
Reviewed-on: #184
2024-09-05 18:41:29 +00:00
de9ac31dbe Update Helm release grafana to v8.5.1 2024-09-05 18:36:19 +00:00
73b9e609dd Merge pull request 'Update owncloud/ocis Docker tag to v5.0.7' (#186) from renovate/owncloud-ocis-5.x into main
Reviewed-on: #186
2024-09-05 18:33:05 +00:00
ae94d3a9a7 Update owncloud/ocis Docker tag to v5.0.7 2024-09-04 21:31:33 +00:00
d077b8fdd8 Merge pull request 'Update homeassistant/home-assistant Docker tag to v2024.9' (#185) from renovate/homeassistant-home-assistant-2024.x into main
Reviewed-on: #185
2024-09-04 20:04:20 +00:00
122e219397 Update homeassistant/home-assistant Docker tag to v2024.9 2024-09-04 18:31:40 +00:00
49073861bc Update actualbudget/actual-server Docker tag to v24.9.0 2024-09-03 17:31:18 +00:00
7ba629e826 Update Helm release traefik to v31 2024-09-03 15:01:23 +00:00
7a872b76f8 bump immich version 2024-09-03 10:35:07 +02:00
e5fa3f2072 Update Helm release immich to v0.7.2 2024-08-30 11:31:07 +00:00
9d1160208f Merge pull request 'Update Helm release grafana to v8.5.0' (#179) from renovate/grafana-8.x into main
Reviewed-on: #179
2024-08-29 09:25:47 +00:00
232952b63e Update Helm release grafana to v8.5.0 2024-08-29 09:25:34 +00:00
79aee6b145 Merge pull request 'Update jellyfin/jellyfin Docker tag to v10.9.10' (#180) from renovate/jellyfin-jellyfin-10.x into main
Reviewed-on: #180
2024-08-29 09:25:20 +00:00
a88968f192 Update jellyfin/jellyfin Docker tag to v10.9.10 2024-08-25 07:01:23 +00:00
8316e39ff7 Merge pull request 'Update ghcr.io/mealie-recipes/mealie Docker tag to v1.12.0' (#177) from renovate/ghcr.io-mealie-recipes-mealie-1.x into main
Reviewed-on: #177
2024-08-23 11:09:01 +00:00
61802b7ec0 Update ghcr.io/mealie-recipes/mealie Docker tag to v1.12.0 2024-08-23 11:08:35 +00:00
87ea82b16d Merge pull request 'Update Helm release grafana to v8.4.7' (#176) from renovate/grafana-8.x into main
Reviewed-on: #176
2024-08-23 11:08:00 +00:00
2596d698d4 Update Helm release cloudnative-pg to v0.22.0 2024-08-22 16:01:28 +00:00
f7b046844e Update Helm release grafana to v8.4.7 2024-08-22 01:31:12 +00:00
b0a802bffc Merge pull request 'Update Helm release cloudnative-pg to v0.21.6' (#161) from renovate/cloudnative-pg-0.x into main
Reviewed-on: #161
2024-08-15 12:12:46 +00:00
b1e3288b94 Update Helm release cloudnative-pg to v0.21.6 2024-08-15 12:11:02 +00:00
02bb4d9f76 Merge pull request 'Update octodns/octodns Docker tag to v2024.08' (#170) from renovate/octodns-octodns-2024.x into main
Reviewed-on: #170
2024-08-15 11:57:32 +00:00
86ac349c5d Update octodns/octodns Docker tag to v2024.08 2024-08-15 11:57:18 +00:00
686525eeff Merge pull request 'Update quay.io/thanos/thanos Docker tag to v0.36.1' (#165) from renovate/quay.io-thanos-thanos-0.x into main
Reviewed-on: #165
2024-08-15 11:57:03 +00:00
39d351e8a1 Update quay.io/thanos/thanos Docker tag to v0.36.1 2024-08-15 11:56:48 +00:00
c152fd117d Merge pull request 'Update Helm release grafana to v8.4.4' (#171) from renovate/grafana-8.x into main
Reviewed-on: #171
2024-08-15 08:27:44 +00:00
6958253c96 Update Helm release grafana to v8.4.4 2024-08-10 06:31:11 +00:00
16074c2026 Merge pull request 'Update docker.io/bitnami/sealed-secrets-controller Docker tag to v0.27.1' (#151) from renovate/docker.io-bitnami-sealed-secrets-controller-0.x into main
Reviewed-on: #151
2024-08-07 22:51:47 +00:00
fd00dbf893 Update docker.io/bitnami/sealed-secrets-controller Docker tag to v0.27.1 2024-08-07 22:51:34 +00:00
513b845de1 Merge pull request 'Update homeassistant/home-assistant Docker tag to v2024.8' (#169) from renovate/homeassistant-home-assistant-2024.x into main
Reviewed-on: #169
2024-08-07 22:51:15 +00:00
a96472553b Update homeassistant/home-assistant Docker tag to v2024.8 2024-08-07 19:01:09 +00:00
55ef4aa6df Merge pull request 'Update actualbudget/actual-server Docker tag to v24.8.0' (#167) from renovate/actualbudget-actual-server-24.x into main
Reviewed-on: #167
2024-08-05 09:58:11 +00:00
b0a6e5fa08 Update actualbudget/actual-server Docker tag to v24.8.0 2024-08-05 09:57:57 +00:00
ab63d1b819 Merge pull request 'Update Helm release grafana to v8.4.1' (#166) from renovate/grafana-8.x into main
Reviewed-on: #166
2024-08-05 09:57:43 +00:00
f3a1e927ff Merge branch 'main' into renovate/grafana-8.x 2024-08-05 09:57:33 +00:00
6f29475d25 Merge pull request 'Update jellyfin/jellyfin Docker tag to v10.9.9' (#168) from renovate/jellyfin-jellyfin-10.x into main
Reviewed-on: #168
2024-08-05 09:57:22 +00:00
e988f55ba8 Update jellyfin/jellyfin Docker tag to v10.9.9 2024-08-05 02:31:25 +00:00
bb259be422 Update Helm release grafana to v8.4.1 2024-08-02 18:01:15 +00:00
ac45bb0958 Merge pull request 'Update ghcr.io/mealie-recipes/mealie Docker tag to v1.11.0' (#164) from renovate/ghcr.io-mealie-recipes-mealie-1.x into main
Reviewed-on: #164
2024-07-31 10:49:04 +00:00
e3580c6170 Update ghcr.io/mealie-recipes/mealie Docker tag to v1.11.0 2024-07-31 10:48:50 +00:00
a801d8ffa8 Merge pull request 'Update Helm release grafana to v8.4.0' (#160) from renovate/grafana-8.x into main
Reviewed-on: #160
2024-07-31 10:48:32 +00:00
53d6029e84 Update Helm release grafana to v8.4.0 2024-07-31 10:01:13 +00:00
239e2fdf49 fix traefik deployment 2024-07-30 18:49:47 +02:00
ae45a87b8a update immich 2024-07-30 17:52:39 +02:00
9cabd42c53 Merge pull request 'Update Helm release metallb to v0.14.8' (#149) from renovate/metallb-0.x into main
Reviewed-on: #149
2024-07-29 09:39:49 +00:00
d45374fe4a Update Helm release metallb to v0.14.8 2024-07-29 09:39:34 +00:00
e350de1a3e Merge pull request 'Update renovate/renovate Docker tag to v38' (#157) from renovate/renovate-renovate-38.x into main
Reviewed-on: #157
2024-07-29 09:39:16 +00:00
8eb64ff444 Merge pull request 'Update Helm release traefik to v30' (#156) from renovate/traefik-30.x into main
Reviewed-on: #156
2024-07-26 08:13:07 +00:00
e8b786e210 Update renovate/renovate Docker tag to v38 2024-07-25 14:01:06 +00:00
37dfd07ea9 Update Helm release traefik to v30 2024-07-24 14:01:26 +00:00
0f872ec949 Merge pull request 'Update owncloud/ocis Docker tag to v5.0.6' (#150) from renovate/owncloud-ocis-5.x into main
Reviewed-on: #150
2024-07-24 08:57:01 +00:00
3b1ab8e595 Update owncloud/ocis Docker tag to v5.0.6 2024-07-24 08:30:56 +00:00
e35da6fc63 Merge pull request 'Update jellyfin/jellyfin Docker tag to v10.9.8' (#154) from renovate/jellyfin-jellyfin-10.x into main
Reviewed-on: #154
2024-07-23 13:12:02 +00:00
da4363262c Merge pull request 'Update Helm release grafana to v8.3.6' (#148) from renovate/grafana-8.x into main
Reviewed-on: #148
2024-07-23 13:11:45 +00:00
ebc787030f Merge pull request 'Update Helm release gitea to v10.4.0' (#155) from renovate/gitea-10.x into main
Reviewed-on: #155
2024-07-23 13:11:28 +00:00
5b2cc939a5 Update Helm release gitea to v10.4.0 2024-07-21 12:01:30 +00:00
f45faf4509 Update jellyfin/jellyfin Docker tag to v10.9.8 2024-07-21 05:31:09 +00:00
7433dd17f4 Update Helm release grafana to v8.3.6 2024-07-20 19:01:02 +00:00
055d091447 redis is required after all 2024-07-18 18:44:04 +02:00
1aa86ef16c better kustomization using remote git refs (instead of git submodules) 2024-07-16 19:08:39 +02:00
dd5e738cab special label for gitea 2024-07-14 12:22:07 +02:00
7e5a1afb90 use nfs-provisioner 2024-07-14 12:11:09 +02:00
175817190c tighter security for deployments, no erronous submodules 2024-07-14 11:37:47 +02:00
31141c6ef1 Merge pull request 'Update Helm release grafana to v8.3.3' (#147) from renovate/grafana-8.x into main
Reviewed-on: #147
2024-07-13 08:57:57 +00:00
e581c3a488 Update Helm release grafana to v8.3.3 2024-07-12 19:30:46 +00:00
4ce4e816c1 Merge pull request 'Update docker.io/bitnami/sealed-secrets-controller Docker tag to v0.27.0' (#108) from renovate/docker.io-bitnami-sealed-secrets-controller-0.x into main
Reviewed-on: #108
2024-07-12 15:58:46 +00:00
f50a2a61fc Merge pull request 'Update Helm release traefik to v29' (#145) from renovate/traefik-29.x into main
Reviewed-on: #145
2024-07-12 15:58:24 +00:00
ee6e4f1e32 Merge pull request 'Update Helm release gitea to v10.3.0' (#146) from renovate/gitea-10.x into main
Reviewed-on: #146
2024-07-12 15:57:24 +00:00
40454d871f update immich 2024-07-12 17:52:10 +02:00
e503ae6d30 bump immich version 2024-07-11 14:51:37 +02:00
5233956a09 Update Helm release traefik to v29 2024-07-09 09:30:56 +00:00
e7118e9182 Merge pull request 'Update Helm release cloudnative-pg to v0.21.5' (#82) from renovate/cloudnative-pg-0.x into main
Reviewed-on: #82
2024-07-09 07:21:52 +00:00
e79da15d16 home assistant dashboard improvements 2024-07-09 09:20:41 +02:00
1bcaafd14e Update Helm release gitea to v10.3.0 2024-07-07 13:00:59 +00:00
6a10c8a908 Merge pull request 'Update Helm release grafana to v8.3.2' (#138) from renovate/grafana-8.x into main
Reviewed-on: #138
2024-07-05 09:03:47 +00:00
7f61158564 Merge pull request 'Update adguard/adguardhome Docker tag to v0.107.52' (#144) from renovate/adguard-adguardhome-0.x into main
Reviewed-on: #144
2024-07-05 09:03:34 +00:00
2f17e6d47a Merge pull request 'Update homeassistant/home-assistant Docker tag to v2024.7' (#143) from renovate/homeassistant-home-assistant-2024.x into main
Reviewed-on: #143
2024-07-05 09:03:20 +00:00
466d58b26b Merge pull request 'Update ghcr.io/mealie-recipes/mealie Docker tag to v1.10.2' (#142) from renovate/ghcr.io-mealie-recipes-mealie-1.x into main
Reviewed-on: #142
2024-07-05 09:03:03 +00:00
03f873ecf4 Merge pull request 'Update actualbudget/actual-server Docker tag to v24.7.0' (#141) from renovate/actualbudget-actual-server-24.x into main
Reviewed-on: #141
2024-07-05 09:02:52 +00:00
56cca145b4 Update ghcr.io/mealie-recipes/mealie Docker tag to v1.10.2 2024-07-05 06:31:10 +00:00
3ecd55787a Update adguard/adguardhome Docker tag to v0.107.52 2024-07-04 16:01:01 +00:00
45e46cf6e9 Update Helm release grafana to v8.3.2 2024-07-04 13:00:59 +00:00
c19d6d8244 Update homeassistant/home-assistant Docker tag to v2024.7 2024-07-03 18:01:23 +00:00
c5250c5a45 Update actualbudget/actual-server Docker tag to v24.7.0 2024-07-02 21:01:19 +00:00
e70c1c9685 actually, as a job makes more sense. And is reschedulable 2024-07-02 18:48:14 +02:00
b5d6f28178 use a pod that is allowed to stop 2024-07-02 17:03:23 +02:00
14a54e691d add even higher limits for minecraft 2024-07-02 15:15:13 +02:00
d6eb7b8f84 Merge pull request 'Update Helm release grafana to v8.2.1' (#137) from renovate/grafana-8.x into main
Reviewed-on: #137
2024-07-01 12:07:37 +00:00
025e0c4ff1 Update Helm release grafana to v8.2.1 2024-07-01 10:01:05 +00:00
d76455787a more generous limits for minecraft 2024-07-01 10:08:08 +02:00
252b732bd8 remove homepage 2024-07-01 10:00:16 +02:00
93ca89060c msinomer 2024-06-30 22:37:40 +02:00
8e043fdd58 cleanup 2024-06-29 12:45:55 +02:00
d87b8bcff2 Merge pull request 'Update jellyfin/jellyfin Docker tag to v10.9.7' (#136) from renovate/jellyfin-jellyfin-10.x into main
Reviewed-on: #136
2024-06-25 08:36:08 +00:00
4be1c00592 Update jellyfin/jellyfin Docker tag to v10.9.7 2024-06-25 01:01:04 +00:00
9b1303d10e update dashboards 2024-06-20 23:33:41 +02:00
36f2596dfb Update docker.io/bitnami/sealed-secrets-controller Docker tag to v0.27.0 2024-06-20 11:31:01 +00:00
abf59c480f make servicemonitor be discoverable 2024-06-19 18:06:10 +02:00
c521a23a16 Merge pull request 'Update quay.io/thanos/thanos Docker tag to v0.35.1' (#87) from renovate/quay.io-thanos-thanos-0.x into main
Reviewed-on: #87
2024-06-18 18:44:56 +00:00
b646968c16 Update apps/immich/kustomization.yaml
bump immich version
2024-06-18 18:44:22 +00:00
a1afc7d736 Merge pull request 'Update ghcr.io/mealie-recipes/mealie Docker tag to v1.9.0' (#135) from renovate/ghcr.io-mealie-recipes-mealie-1.x into main
Reviewed-on: #135
2024-06-18 18:42:56 +00:00
799d084471 Update ghcr.io/mealie-recipes/mealie Docker tag to v1.9.0 2024-06-18 10:01:00 +00:00
511ed7e78d Update Helm release cloudnative-pg to v0.21.5 2024-06-13 14:30:47 +00:00
0d1d10a103 slim down jellyfin 2024-06-13 13:14:44 +02:00
de667a31ad immich update 2024-06-13 00:21:48 +02:00
ef2b1d393d Merge pull request 'Update Helm release grafana to v8.0.2' (#131) from renovate/grafana-8.x into main
Reviewed-on: #131
2024-06-12 22:09:44 +00:00
0402d54fda Merge pull request 'Update octodns/octodns Docker tag to v2024.06' (#127) from renovate/octodns-octodns-2024.x into main
Reviewed-on: #127
2024-06-12 22:09:28 +00:00
d80dfc35fd Update Helm release grafana to v8.0.2 2024-06-12 08:30:50 +00:00
9d47443573 Update octodns/octodns Docker tag to v2024.06 2024-06-10 17:00:57 +00:00
806b42874c update thanos 2024-06-10 00:34:01 +02:00
3c71ac8411 Merge pull request 'Update Helm release grafana to v8.0.1' (#125) from renovate/grafana-8.x into main
Reviewed-on: #125
2024-06-09 21:42:40 +00:00
c2db5eb712 Merge pull request 'Update alpine/git Docker tag to v2.45.2' (#126) from renovate/alpine-git-2.x into main
Reviewed-on: #126
2024-06-09 21:42:14 +00:00
040771494a Merge pull request 'Update Helm release traefik to v28' (#85) from renovate/traefik-28.x into main
Reviewed-on: #85
2024-06-09 21:38:58 +00:00
57c57b7620 changes according to migration docs 2024-06-09 23:38:34 +02:00
a41ec520a2 Update alpine/git Docker tag to v2.45.2 2024-06-09 04:31:00 +00:00
9057768561 Update Helm release grafana to v8.0.1 2024-06-07 21:00:56 +00:00
db3dc9a8af Merge pull request 'Update Helm release gitea to v10.2.0' (#124) from renovate/gitea-10.x into main
Reviewed-on: #124
2024-06-07 16:59:35 +00:00
31a968ef87 Merge pull request 'Update jellyfin/jellyfin Docker tag to v10.9.6' (#123) from renovate/jellyfin-jellyfin-10.x into main
Reviewed-on: #123
2024-06-07 16:59:00 +00:00
9778d796a9 Update Helm release gitea to v10.2.0 2024-06-06 21:01:06 +00:00
7a44938d6d Update jellyfin/jellyfin Docker tag to v10.9.6 2024-06-06 19:00:59 +00:00
689038a808 Merge pull request 'Update adguard/adguardhome Docker tag to v0.107.51' (#122) from renovate/adguard-adguardhome-0.x into main
Reviewed-on: #122
2024-06-06 17:19:47 +00:00
88ca15d995 Update adguard/adguardhome Docker tag to v0.107.51 2024-06-06 15:00:56 +00:00
249b335ccb Merge pull request 'Update jellyfin/jellyfin Docker tag to v10.9.5' (#120) from renovate/jellyfin-jellyfin-10.x into main
Reviewed-on: #120
2024-06-06 09:22:20 +00:00
8c33c50457 Merge pull request 'Update ghcr.io/gethomepage/homepage Docker tag to v0.9.2' (#121) from renovate/ghcr.io-gethomepage-homepage-0.x into main
Reviewed-on: #121
2024-06-06 09:22:08 +00:00
4f1cbbabe6 Update ghcr.io/gethomepage/homepage Docker tag to v0.9.2 2024-06-06 03:30:44 +00:00
4f4e6bdf13 Update jellyfin/jellyfin Docker tag to v10.9.5 2024-06-05 22:30:46 +00:00
ebbece048e Merge pull request 'Update homeassistant/home-assistant Docker tag to v2024.6' (#119) from renovate/homeassistant-home-assistant-2024.x into main
Reviewed-on: #119
2024-06-05 21:58:04 +00:00
9987aa9d0b Merge pull request 'Update ghcr.io/mealie-recipes/mealie Docker tag to v1.8.0' (#118) from renovate/ghcr.io-mealie-recipes-mealie-1.x into main
Reviewed-on: #118
2024-06-05 21:57:12 +00:00
14cc093e51 Merge pull request 'Update alpine/git Docker tag to v2.45.1' (#110) from renovate/alpine-git-2.x into main
Reviewed-on: #110
2024-06-05 21:56:57 +00:00
18576ff7f2 Update homeassistant/home-assistant Docker tag to v2024.6 2024-06-05 19:31:05 +00:00
bee9243407 Update ghcr.io/mealie-recipes/mealie Docker tag to v1.8.0 2024-06-05 19:31:01 +00:00
8223b336ed Merge pull request 'Update jellyfin/jellyfin Docker tag to v10.9.4' (#112) from renovate/jellyfin-jellyfin-10.x into main
Reviewed-on: #112
2024-06-05 19:28:21 +00:00
1fd0da6778 Merge pull request 'Update Helm release grafana to v8' (#115) from renovate/grafana-8.x into main
Reviewed-on: #115
2024-06-05 19:26:21 +00:00
6be344fc8d Merge pull request 'Update actualbudget/actual-server Docker tag to v24.6.0' (#114) from renovate/actualbudget-actual-server-24.x into main
Reviewed-on: #114
2024-06-05 19:25:58 +00:00
d46ee3894e Merge pull request 'Update ghcr.io/gethomepage/homepage Docker tag to v0.9.1' (#117) from renovate/ghcr.io-gethomepage-homepage-0.x into main
Reviewed-on: #117
2024-06-05 19:25:24 +00:00
b282f363ce Update ghcr.io/gethomepage/homepage Docker tag to v0.9.1 2024-06-03 20:35:37 +00:00
4b494642f5 Update Helm release grafana to v8 2024-06-03 16:01:41 +00:00
08c508862f Update actualbudget/actual-server Docker tag to v24.6.0 2024-06-03 10:31:05 +00:00
3d63498b25 Update jellyfin/jellyfin Docker tag to v10.9.4 2024-06-01 23:01:04 +00:00
4ef6b01a92 Update Helm release traefik to v28 2024-05-31 08:31:11 +00:00
7cf2c9c479 Update quay.io/thanos/thanos Docker tag to v0.35.1 2024-05-28 14:31:04 +00:00
a11f3e24f8 Merge pull request 'Update jellyfin/jellyfin Docker tag to v10.9.3' (#111) from renovate/jellyfin-jellyfin-10.x into main
Reviewed-on: #111
2024-05-27 17:39:48 +00:00
adff6180ea Update jellyfin/jellyfin Docker tag to v10.9.3 2024-05-27 00:30:59 +00:00
99dd81531e Update alpine/git Docker tag to v2.45.1 2024-05-25 23:01:08 +00:00
4f18adf1da try once more 2024-05-25 13:12:23 +02:00
7e3f8a2764 and undo because it doesn't work 2024-05-25 12:39:33 +02:00
3a94d7a7b7 add docker builder using kubernetes natively 2024-05-25 12:32:15 +02:00
9f8ae4b0fa gitea revert to dind runner 2024-05-25 11:24:55 +02:00
d53ee0079e Merge pull request 'Update ghcr.io/mealie-recipes/mealie Docker tag to v1.7.0' (#106) from renovate/ghcr.io-mealie-recipes-mealie-1.x into main
Reviewed-on: #106
2024-05-24 19:18:11 +00:00
f844eb8caa Merge pull request 'Update adguard/adguardhome Docker tag to v0.107.50' (#107) from renovate/adguard-adguardhome-0.x into main
Reviewed-on: #107
2024-05-23 21:38:05 +00:00
fb645058ac Update adguard/adguardhome Docker tag to v0.107.50 2024-05-23 15:31:10 +00:00
261790e329 Update ghcr.io/mealie-recipes/mealie Docker tag to v1.7.0 2024-05-23 11:14:10 +00:00
645c8edde7 Merge pull request 'Update adguard/adguardhome Docker tag to v0.107.49' (#102) from renovate/adguard-adguardhome-0.x into main
Reviewed-on: #102
2024-05-23 11:10:18 +00:00
c7b52155ac allow spindown of minecraft server 2024-05-23 13:08:48 +02:00
46a2c8998e Merge pull request 'Update alpine/git Docker tag to v2.43.4' (#101) from renovate/alpine-git-2.x into main
Reviewed-on: #101
2024-05-23 09:42:19 +00:00
fbba22cb07 Merge pull request 'Update owncloud/ocis Docker tag to v5.0.5' (#103) from renovate/owncloud-ocis-5.x into main
Reviewed-on: #103
2024-05-23 09:42:00 +00:00
f03c76c53b Update owncloud/ocis Docker tag to v5.0.5 2024-05-22 14:30:56 +00:00
c7f5cb8773 Update adguard/adguardhome Docker tag to v0.107.49 2024-05-21 15:30:48 +00:00
206f8e4c50 try k8s-native actions once more 2024-05-21 12:14:48 +02:00
03df5e4663 Merge pull request 'Update jellyfin/jellyfin Docker tag to v10.9.2' (#100) from renovate/jellyfin-jellyfin-10.x into main
Reviewed-on: #100
2024-05-20 19:12:17 +00:00
72906d205b with certs 2024-05-20 12:22:56 +02:00
c6f7471ebb try fixing the labels 2024-05-20 12:15:56 +02:00
a3550d10cb add wireguard 2024-05-19 12:31:50 +02:00
f22d25b101 add minecraft without autosync 2024-05-19 11:22:21 +02:00
b7b9afa1a5 Update alpine/git Docker tag to v2.43.4 2024-05-19 04:30:42 +00:00
835f05866c different gitea runner strategy 2024-05-18 17:19:14 +02:00
1aa2e55f22 try a better gitea actions runner 2024-05-18 13:57:26 +02:00
3c777a92c0 Update jellyfin/jellyfin Docker tag to v10.9.2 2024-05-17 21:00:55 +00:00
7d893d27ec bump immich version 2024-05-16 10:13:19 +02:00
d0fcf951cc bump immich version 2024-05-16 09:51:57 +02:00
1e9959e3d1 better minecraft deployment 2024-05-16 09:51:16 +02:00
ce821b6abe Merge pull request 'Update binwiederhier/ntfy Docker tag to v2.11.0' (#98) from renovate/binwiederhier-ntfy-2.x into main
Reviewed-on: #98
2024-05-16 07:39:12 +00:00
1de224ea77 Merge pull request 'Update jellyfin/jellyfin Docker tag to v10.9.1' (#95) from renovate/jellyfin-jellyfin-10.x into main
Reviewed-on: #95
2024-05-16 07:37:41 +00:00
103f4c8a9f Merge pull request 'Update owncloud/ocis Docker tag to v5.0.4' (#99) from renovate/owncloud-ocis-5.x into main
Reviewed-on: #99
2024-05-16 07:37:20 +00:00
124881d3a8 Update owncloud/ocis Docker tag to v5.0.4 2024-05-14 13:31:01 +00:00
0b5d2a5fe6 Update jellyfin/jellyfin Docker tag to v10.9.1 2024-05-14 09:01:22 +00:00
332082c9fc Update binwiederhier/ntfy Docker tag to v2.11.0 2024-05-13 20:31:02 +00:00
0eaa9fe774 empty line removed 2024-05-13 14:26:53 +02:00
192e2e869f minecraft 2024-05-13 14:25:49 +02:00
0fd9936db5 gitea runner improvements 2024-05-13 14:25:49 +02:00
1a9d0fc00c Merge pull request 'Update jellyfin/jellyfin Docker tag to v10.9.0' (#94) from renovate/jellyfin-jellyfin-10.x into main
Reviewed-on: #94
2024-05-12 11:07:57 +00:00
a8dfca3c43 Update jellyfin/jellyfin Docker tag to v10.9.0 2024-05-11 19:01:08 +00:00
42e2bc35a5 Merge pull request 'Update ghcr.io/gethomepage/homepage Docker tag to v0.8.13' (#90) from renovate/ghcr.io-gethomepage-homepage-0.x into main
Reviewed-on: #90
2024-05-10 08:46:45 +00:00
7e2e5a56db Merge branch 'main' into renovate/ghcr.io-gethomepage-homepage-0.x 2024-05-10 08:45:47 +00:00
01279dd023 Merge pull request 'Update octodns/octodns Docker tag to v2024.05' (#91) from renovate/octodns-octodns-2024.x into main
Reviewed-on: #91
2024-05-08 13:29:51 +00:00
5f1dcaabba Update octodns/octodns Docker tag to v2024.05 2024-05-06 15:30:45 +00:00
37bdb32f43 Update ghcr.io/gethomepage/homepage Docker tag to v0.8.13 2024-05-06 05:30:44 +00:00
91 changed files with 860 additions and 1078 deletions

6
.gitmodules vendored
View File

@@ -1,6 +0,0 @@
[submodule "infrastructure/external-dns/octodns"]
path = infrastructure/external-dns/octodns
url = ssh://git@git.kluster.moll.re:2222/remoll/dns.git
[submodule "apps/monitoring/dashboards"]
path = apps/monitoring/dashboards
url = ssh://git@git.kluster.moll.re:2222/remoll/grafana-dashboards.git

View File

@@ -10,7 +10,7 @@ resources:
images:
- name: adguard/adguardhome
newName: adguard/adguardhome
newTag: v0.107.48
newTag: v0.107.52
namespace: adguard

View File

@@ -0,0 +1,42 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: audiobookshelf
spec:
replicas: 1
selector:
matchLabels:
app: audiobookshelf
template:
metadata:
labels:
app: audiobookshelf
spec:
containers:
- name: audiobookshelf
image: audiobookshelf
ports:
- containerPort: 80
env:
- name: TZ
value: Europe/Berlin
- name: CONFIG_PATH
value: /data/config
- name: METADATA_PATH
value: /data/metadata
volumeMounts:
- name: data
mountPath: /data
resources:
requests:
cpu: "100m"
memory: "200Mi"
limits:
cpu: "2"
memory: "1Gi"
volumes:
- name: data
persistentVolumeClaim:
claimName: audiobookshelf-data

View File

@@ -0,0 +1,17 @@
apiVersion: traefik.io/v1alpha1
kind: IngressRoute
metadata:
name: audiobookshelf-ingressroute
spec:
entryPoints:
- websecure
routes:
- match: Host(`audiobookshelf.kluster.moll.re`)
kind: Rule
services:
- name: audiobookshelf-web
port: 80
tls:
certResolver: default-tls

View File

@@ -1,17 +1,15 @@
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
namespace: homepage
resources:
- namespace.yaml
- pvc.yaml
- deployment.yaml
- service.yaml
- configmap.yaml
- ingress.yaml
namespace: audiobookshelf
images:
- name: homepage
newName: ghcr.io/gethomepage/homepage
newTag: v0.8.12
- name: audiobookshelf
newName: ghcr.io/advplyr/audiobookshelf
newTag: "2.13.4"

View File

@@ -0,0 +1,11 @@
kind: PersistentVolumeClaim
apiVersion: v1
metadata:
name: audiobookshelf-data
spec:
storageClassName: "nfs-client"
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 10Gi

View File

@@ -0,0 +1,10 @@
apiVersion: v1
kind: Service
metadata:
name: audiobookshelf-web
spec:
selector:
app: audiobookshelf
ports:
- port: 80
targetPort: 80

View File

@@ -13,4 +13,4 @@ namespace: files
images:
- name: ocis
newName: owncloud/ocis
newTag: "5.0.3"
newTag: "5.0.7"

File diff suppressed because one or more lines are too long

View File

@@ -13,4 +13,4 @@ resources:
images:
- name: actualbudget
newName: actualbudget/actual-server
newTag: 24.5.0
newTag: 24.9.0

View File

@@ -0,0 +1,15 @@
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
namespace: games
resources:
- namespace.yaml
helmCharts:
- name: games-on-whales
releaseName: games-on-whales
version: 2.0.0
valuesFile: values.yaml
repo: https://angelnu.github.io/helm-charts

View File

@@ -0,0 +1,6 @@
apiVersion: v1
kind: Namespace
metadata:
name: placeholder
labels:
pod-security.kubernetes.io/enforce: privileged

143
apps/games/values.yaml Normal file
View File

@@ -0,0 +1,143 @@
#
# IMPORTANT NOTE
#
# This chart inherits from our common library chart. You can check the default values/options here:
# https://github.com/k8s-at-home/library-charts/tree/main/charts/stable/common/values.yaml
#
ingress:
# -- Enable and configure ingress settings for the chart under this key.
# @default -- See values.yaml
main:
enabled: false
service:
# -- Enable and configure TCP service settings for the chart under this key.
# @default -- See values.yaml
main: {}
# type: LoadBalancer
# loadBalancerIP: 192.168.1.129
# -- Enable and configure UDP service settings for the chart under this key.
# @default -- See values.yaml
udp: {}
# type: LoadBalancer
# loadBalancerIP: 192.168.1.129
# -- Configure persistence settings for the chart under this key.
# @default -- See values.yaml
persistence:
home:
enabled: true
type: emptyDir
mountPath: /home/retro
# -- (object) Pass GPU resources to Xorg, steam and retroarch containers
# See Custom configuration section in the Readme
graphic_resources:
sunshine:
image:
# -- sunshine image repository
repository: ghcr.io/games-on-whales/sunshine
# -- sunshine image tag
tag: 1.0.0
# -- sunshine image pull policy
pullPolicy: IfNotPresent
# -- sunshine web interface user
user: admin
# -- sunshine web interface pasword
password: admin
# -- sunshine log level
logLevel: info
# -- sunshine additional env settings
env: {}
xorg:
image:
# -- xorg image repository
repository: ghcr.io/games-on-whales/xorg
# -- xorg image tag
tag: 1.0.0
# -- xorg image pull policy
pullPolicy: IfNotPresent
# -- xorg display ID
# display: :99
# -- xorg refresh rate
# refreshrate: 60
# -- xorg resolution
resolution: 1920x1080
pulseaudio:
image:
# -- pulseaudio image repository
repository: ghcr.io/games-on-whales/pulseaudio
# -- pulseaudio image tag
tag: 1.0.0
# -- pulseaudio image pull policy
pullPolicy: IfNotPresent
retroarch:
# -- enable/disable retroarch container
enabled: true
image:
# -- retroarch image repository
repository: ghcr.io/games-on-whales/retroarch
# -- retroarch image tag
tag: 1.0.0
# -- retroarch image pull policy
pullPolicy: IfNotPresent
# -- retroarch log level
logLevel: info
# -- retroarch extra volume mounts
volumeMounts: []
steam:
# -- enable/disable steam container
enabled: true
image:
# -- steam image repository
repository: ghcr.io/games-on-whales/steam
# -- steam image tag
tag: 1.0.0
# -- steam image pull policy
pullPolicy: IfNotPresent
# -- enable proton log
protonLog: 1
# -- steam extra volume mounts
volumeMounts: []
firefox:
# -- enable/disable firefox container
enabled: true
image:
# -- image repository
repository: andrewmackrodt/firefox-x11
# -- image tag
tag: 125.0.2-r1
# -- image pull policy
pullPolicy: IfNotPresent
# -- firefox log level
logLevel: info
# -- firefox extra volume mounts
volumeMounts: []
mkhomeretrodirs:
image:
# -- image repository
repository: busybox
# -- image tag
tag: 1.36.1
# -- image pull policy
pullPolicy: IfNotPresent
# -- Configure pulse audio settings
# @default -- See values.yaml
pulse:
config:
default.pa: |-
.fail
load-module module-null-sink sink_name=sunshine
set-default-sink sunshine
load-module module-native-protocol-unix auth-anonymous=1 socket=/tmp/pulse/pulse-socket
client.conf: |-
default-sink = sink-sunshine-stereo
autospawn = no
daemon-binary = /bin/true
daemon.conf: |-
exit-idle-time = -1
flat-volumes = yes

View File

@@ -15,4 +15,4 @@ resources:
images:
- name: homeassistant/home-assistant
newName: homeassistant/home-assistant
newTag: "2024.5"
newTag: "2024.9"

View File

@@ -2,6 +2,8 @@ apiVersion: v1
kind: Service
metadata:
name: homeassistant-web
labels:
app: homeassistant
spec:
selector:
app: homeassistant

View File

@@ -1,98 +0,0 @@
apiVersion: v1
kind: ConfigMap
metadata:
name: config
labels:
app.kubernetes.io/name: homepage
data:
kubernetes.yaml: "" #|
# mode: cluster
settings.yaml: |
title: "Homepage"
background: https://images.unsplash.com/photo-1547327132-5d20850c62b5?q=80&w=3870&auto=format&fit=crop
cardBlur: sm
#settings.yaml: |
# providers:
# longhorn:
# url: https://longhorn.my.network
custom.css: ""
custom.js: ""
bookmarks.yaml: |
- Developer:
- Github:
- abbr: GH
href: https://github.com/moll-re
services.yaml: |
- Media:
- Jellyfin backend:
href: https://media-backend.kluster.moll.re
ping: media-backend.kluster.moll.re
- Jellyfin vue:
href: https://media.kluster.moll.re
ping: media.kluster.moll.re
- Immich:
href: https://immich.kluster.moll.re
ping: immich.kluster.moll.re
- Productivity:
- OwnCloud:
href: https://ocis.kluster.moll.re
ping: ocis.kluster.moll.re
- ToDo:
href: https://todos.kluster.moll.re
ping: todos.kluster.moll.re
- Finance:
href: https://finance.kluster.moll.re
ping: finance.kluster.moll.re
- Home:
- Home Assistant:
href: https://home.kluster.moll.re
ping: home.kluster.moll.re
- Grafana:
href: https://grafana.kluster.moll.re
ping: grafana.kluster.moll.re
- Recipes:
href: https://recipes.kluster.moll.re
ping: recipes.kluster.moll.re
- Infra:
- Gitea:
href: https://git.kluster.moll.re
ping: git.kluster.moll.re
- ArgoCD:
href: https://argocd.kluster.moll.re
ping: argocd.kluster.moll.re
widgets.yaml: |
# - kubernetes:
# cluster:
# show: true
# cpu: true
# memory: true
# showLabel: true
# label: "cluster"
# nodes:
# show: true
# cpu: true
# memory: true
# showLabel: true
- search:
provider: duckduckgo
- openmeteo:
label: Zürich # optional
latitude: 47.24236
longitude: 8.30439
units: metric # or imperial
cache: 30 # Time in minutes to cache API responses, to stay within limits
format: # optional, Intl.NumberFormat options
maximumFractionDigits: 1
- datetime:
locale: de
format:
dateStyle: long
timeStyle: short
- adguard:
url: http://adguard-home-web.adguard-home:3000
docker.yaml: ""

View File

@@ -1,64 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: homepage
labels:
app.kubernetes.io/name: homepage
spec:
revisionHistoryLimit: 3
replicas: 1
strategy:
type: RollingUpdate
selector:
matchLabels:
app.kubernetes.io/name: homepage
template:
metadata:
labels:
app.kubernetes.io/name: homepage
spec:
# serviceAccountName: homepage
# automountServiceAccountToken: true
dnsPolicy: ClusterFirst
# enableServiceLinks: true
containers:
- name: homepage
image: homepage
imagePullPolicy: Always
ports:
- name: http
containerPort: 3000
protocol: TCP
volumeMounts:
- mountPath: /app/config/custom.js
name: config
subPath: custom.js
- mountPath: /app/config/custom.css
name: config
subPath: custom.css
- mountPath: /app/config/bookmarks.yaml
name: config
subPath: bookmarks.yaml
- mountPath: /app/config/docker.yaml
name: config
subPath: docker.yaml
- mountPath: /app/config/kubernetes.yaml
name: config
subPath: kubernetes.yaml
- mountPath: /app/config/services.yaml
name: config
subPath: services.yaml
- mountPath: /app/config/settings.yaml
name: config
subPath: settings.yaml
- mountPath: /app/config/widgets.yaml
name: config
subPath: widgets.yaml
- mountPath: /app/config/logs
name: logs
volumes:
- name: config
configMap:
name: config
- name: logs
emptyDir: {}

View File

@@ -1,16 +0,0 @@
apiVersion: traefik.io/v1alpha1
kind: IngressRoute
metadata:
name: homepage-ingressroute
spec:
entryPoints:
- websecure
routes:
- match: Host(`start.kluster.moll.re`)
kind: Rule
services:
- name: homepage-web
port: 3000
tls:
certResolver: default-tls

View File

@@ -1,15 +0,0 @@
apiVersion: v1
kind: Service
metadata:
name: homepage-web
labels:
app.kubernetes.io/name: homepage
spec:
type: ClusterIP
ports:
- port: 3000
targetPort: http
protocol: TCP
name: http
selector:
app.kubernetes.io/name: homepage

View File

@@ -7,18 +7,27 @@ resources:
- postgres.yaml
- postgres.sealedsecret.yaml
namespace: immich
helmCharts:
- name: immich
releaseName: immich
version: 0.6.0
version: 0.7.2
valuesFile: values.yaml
repo: https://immich-app.github.io/immich-charts
images:
- name: ghcr.io/immich-app/immich-machine-learning
newTag: v1.103.1
newTag: v1.116.2
- name: ghcr.io/immich-app/immich-server
newTag: v1.103.1
newTag: v1.116.2
patches:
- path: patch-redis-pvc.yaml
target:
kind: StatefulSet
name: immich-redis-master

View File

@@ -0,0 +1,17 @@
apiVersion: apps/v1
kind: StatefulSet
metadata:
name: immich-redis-master
spec:
volumeClaimTemplates:
- apiVersion: v1
kind: PersistentVolumeClaim
metadata:
name: redis-data
spec:
storageClassName: nfs-client
accessModes:
- ReadWriteMany
resources:
requests:
storage: 2Gi

View File

@@ -1,4 +1,3 @@
apiVersion: postgresql.cnpg.io/v1
kind: Cluster
metadata:
@@ -13,18 +12,24 @@ spec:
secret:
name: postgres-password
# Enable the VECTORS extension
postInitSQL:
- CREATE EXTENSION IF NOT EXISTS "vectors";
postgresql:
shared_preload_libraries:
- "vectors.so"
# Persistent storage configuration
storage:
size: 1Gi
size: 2Gi
pvcTemplate:
storageClassName: ""
accessModes:
- ReadWriteOnce
resources:
requests:
storage: "1Gi"
volumeName: immich-postgres
storage: 2Gi
storageClassName: nfs-client
volumeMode: Filesystem
monitoring:
enablePodMonitor: true

View File

@@ -1,40 +1,11 @@
---
apiVersion: v1
kind: PersistentVolume
metadata:
name: immich-nfs
spec:
capacity:
storage: "50Gi"
accessModes:
- ReadWriteOnce
nfs:
path: /kluster/immich
server: 192.168.1.157
---
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
name: immich-nfs
name: data
spec:
storageClassName: ""
storageClassName: "nfs-client"
accessModes:
- ReadWriteOnce
resources:
requests:
storage: "50Gi"
volumeName: immich-nfs
---
apiVersion: v1
kind: PersistentVolume
metadata:
name: immich-postgres
spec:
capacity:
storage: "1Gi"
accessModes:
- ReadWriteOnce
nfs:
path: /kluster/immich-postgres
server: 192.168.1.157
# later used by cnpg
storage: "100Gi"

View File

@@ -22,16 +22,19 @@ env:
secretKeyRef:
name: postgres-password
key: password
IMMICH_WEB_URL: '{{ printf "http://%s-web:3000" .Release.Name }}'
IMMICH_MACHINE_LEARNING_URL: '{{ printf "http://%s-machine-learning:3003" .Release.Name }}'
IMMICH_METRICS: true
immich:
metrics:
# Enabling this will create the service monitors needed to monitor immich with the prometheus operator
enabled: true
persistence:
# Main data store for all photos shared between different components.
library:
# Automatically creating the library volume is not supported by this chart
# You have to specify an existing PVC to use
existingClaim: immich-nfs
existingClaim: data
# Dependencies
@@ -52,16 +55,6 @@ server:
main:
enabled: false
microservices:
enabled: true
persistence:
geodata-cache:
enabled: true
size: 1Gi
# Optional: Set this to pvc to avoid downloading the geodata every start.
type: emptyDir
accessMode: ReadWriteMany
machine-learning:
enabled: true
persistence:

View File

@@ -18,6 +18,9 @@ spec:
limits:
memory: "2Gi"
cpu: "2"
requests:
memory: "128Mi"
cpu: "250m"
ports:
- containerPort: 8096
name: jellyfin

View File

@@ -1,24 +1,5 @@
apiVersion: traefik.io/v1alpha1
kind: IngressRoute
metadata:
name: jellyfin-vue-ingress
namespace: media
spec:
entryPoints:
- websecure
routes:
- match: Host(`media.kluster.moll.re`)
middlewares:
- name: jellyfin-websocket
kind: Rule
services:
- name: jellyfin-web
port: 80
tls:
certResolver: default-tls
---
apiVersion: traefik.io/v1alpha1
kind: IngressRoute
metadata:
name: jellyfin-backend-ingress
namespace: media
@@ -26,7 +7,7 @@ spec:
entryPoints:
- websecure
routes:
- match: Host(`media-backend.kluster.moll.re`) && !Path(`/metrics`)
- match: Host(`media.kluster.moll.re`) && !Path(`/metrics`)
middlewares:
- name: jellyfin-websocket
- name: jellyfin-server-headers

View File

@@ -1,17 +0,0 @@
apiVersion: monitoring.coreos.com/v1
kind: ServiceMonitor
metadata:
name: jellyfin
labels:
metrics: prometheus
spec:
selector:
matchLabels:
app: jellyfin-server-service
endpoints:
- path: /metrics
targetPort: jellyfin
# this exposes metrics on port 8096 as enabled in the jellyfin config
# https://jellyfin.org/docs/general/networking/monitoring/
# the metrics are available at /metrics but blocked by the ingress

View File

@@ -5,16 +5,11 @@ namespace: media
resources:
- namespace.yaml
- pvc.yaml
- server.deployment.yaml
- server.service.yaml
- web.deployment.yaml
- web.service.yaml
- deployment.yaml
- service.yaml
- ingress.yaml
images:
- name: jellyfin/jellyfin
newName: jellyfin/jellyfin
newTag: 10.8.13
- name: ghcr.io/jellyfin/jellyfin-vue
newName: ghcr.io/jellyfin/jellyfin-vue
newTag: stable-rc.0.3.1
newTag: 10.9.11

View File

@@ -1,27 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: jellyfin-web
spec:
selector:
matchLabels:
app: jellyfin-web
template:
metadata:
labels:
app: jellyfin-web
spec:
containers:
- name: jellyfin-web
image: ghcr.io/jellyfin/jellyfin-vue
resources:
limits:
memory: "128Mi"
cpu: "30m"
ports:
- containerPort: 80
env:
- name: TZ
value: Europe/Berlin
- name: DEFAULT_SERVERS
value: "https://media-backend.kluster.moll.re"

View File

@@ -1,12 +0,0 @@
apiVersion: v1
kind: Service
metadata:
name: jellyfin-web
spec:
selector:
app: jellyfin-web
ports:
- protocol: TCP
port: 80
targetPort: 80

7
apps/minecraft/README.md Normal file
View File

@@ -0,0 +1,7 @@
## Sending a command
```
kubectl exec -it -n minecraft deploy/minecraft-server -- /bin/bash
mc-send-to-console /help
# or directly
kubectl exec -it -n minecraft deploy/minecraft-server -- mc-send-to-console /help
```

View File

@@ -0,0 +1,16 @@
---
apiVersion: bitnami.com/v1alpha1
kind: SealedSecret
metadata:
creationTimestamp: null
name: curseforge-api
namespace: minecraft
spec:
encryptedData:
key: AgBYeAiejdmxDBorvgnxQX5YvUhR3NId2vfWybMKlc27e6D/bKglLNyZMk70xSnFAPjcDmZ20mYjFPYvDOr9T6IU/REJ8QlzoKAn0xW779R4SkIxRToT+dJv+OM2avgQ9uqp7vja29xeXMjYAnQML+QGZKcrT8mE04G/Ty8rdUiv3yUXK5HFAR3SUF35aVLdlthLjpRkv1s0R7GAP4L2pNzBJNV3i37viceUSSjU0zpOa23fsQOkPAs67AIukAJBqh/hyF/hR9H1GeYZNTI3OcHcvC2iNk/XGstvv0Zy6ApzoebsfWGdsbVn+QUI0EBw+mSTPqpl71cbkz0v4S4XAVndosxWpe6AIgm5MBTU0FXIyGyoFDe1aMPq8BXiQikYVwB48oVNh9KF0xXX5AOG0whB/FEsL3OJsiNQvQ3R/Hru43JBn64oxjVtLfM3E7u8v/xr1VQahX8dylDmb4s5EV01U6O4y19Ou4td1eEMlhpJb0fBPDRUYuWxZAEDGmp+U4tAakyPed11VkcZPPn9fKAAcv8sGs3TYAbbF18hqsBnv2Wd+i7ZEvKwmdmfR/T0r1TJGsvKI7jaW0QtH256XrSxQp7a52qMKMVQWOSKw2k27t/IkRhxT2Prw4GfJvaVr4RozUaBf3LV/hfDWlDfmM2zg3X9W8HkzjotGg021OLxsa0Wzmhffvb8h4bvZwxeq3U1xaJocqXui7z0rT2pF4z3wYHR/lPtexHcOA2M8gfBGKb1rBKh+kW+N+/ZfVLNI0mokg5vrTO2nR2rb4c=
template:
metadata:
creationTimestamp: null
name: curseforge-api
namespace: minecraft
type: Opaque

57
apps/minecraft/job.yaml Normal file
View File

@@ -0,0 +1,57 @@
apiVersion: batch/v1
kind: Job
metadata:
name: start-server
spec:
template:
spec:
restartPolicy: OnFailure
containers:
- name: minecraft-server
image: minecraft
resources:
limits:
memory: "10000Mi"
cpu: "5"
requests:
memory: "1500Mi"
cpu: "500m"
ports:
- containerPort: 25565
env:
- name: EULA
value: "TRUE"
- name: TYPE
value: "AUTO_CURSEFORGE"
- name: CF_API_KEY
valueFrom:
secretKeyRef:
name: curseforge-api
key: key
- name: CF_PAGE_URL
value: "https://www.curseforge.com/minecraft/modpacks/vault-hunters-1-18-2/files/5413446"
- name: VERSION
value: "1.18.2"
- name: INIT_MEMORY
value: "1G"
- name: MAX_MEMORY
value: "8G"
- name: MOTD
value: "VaultHunters baby!"
- name: ENABLE_RCON
value: "false"
- name: CREATE_CONSOLE_IN_PIPE
value: "true"
- name: ONLINE_MODE
value: "true"
- name: ENABLE_AUTOSTOP
value: "true"
volumeMounts:
- name: minecraft-data
mountPath: /data
volumes:
- name: minecraft-data
persistentVolumeClaim:
claimName: minecraft-data

View File

@@ -0,0 +1,17 @@
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
namespace: minecraft
resources:
- namespace.yaml
- pvc.yaml
- job.yaml
- service.yaml
- curseforge.sealedsecret.yaml
images:
- name: minecraft
newName: itzg/minecraft-server
newTag: java21

View File

@@ -0,0 +1,4 @@
apiVersion: v1
kind: Namespace
metadata:
name: placeholder

11
apps/minecraft/pvc.yaml Normal file
View File

@@ -0,0 +1,11 @@
kind: PersistentVolumeClaim
apiVersion: v1
metadata:
name: minecraft-data
spec:
storageClassName: "nfs-client"
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 1Gi

View File

@@ -0,0 +1,12 @@
apiVersion: v1
kind: Service
metadata:
name: minecraft-server
spec:
selector:
app: minecraft-server
ports:
- port: 25565
targetPort: 25565
type: LoadBalancer
loadBalancerIP: 192.168.3.4

View File

@@ -7,8 +7,8 @@ metadata:
namespace: monitoring
spec:
encryptedData:
password: AgBe8isrCWd5MuaQq5CpA+P3fDizCCDo23BVauaBJLuMRIYbVwpfahaJW7Ocj3LTXwdeVVPBrOk2D6vESUXu6I0EWc3y/NFN4ZezScxMcjmeaAb+z1zWwdH0FynTPJYOxv1fis1FDTkXDmGy3FXo5NDK9ET899TtulKFkh7UqSxdrRWbD3pegJgqKGPIqDCTAxZN/ssiccfWGS4lHqQBJkXn8DeampcKwjOCvgaBdilF03GoSfpgsqa2Iw2SfTDEobWBWVMMK/RB3/Oi/YJkGwMW3ECUxvTDam8gb0RFA1xjWXoYTLVVP5fK7q7x63ns51HebloxAP1GBrt138N/iDrfbGfjNP8Lx0NFl5y5bTgYN/z8DVTOFf90xxWe+YYERdwllg0Ci1JLNbA+NszXTD4L/HC7a8XuBfjRzxMTeymNjR76jzfPkH6v1EvesOduTfSrahPgS0qS+eGOier1rHxj3EBRhOScY1ut5Bq4oJMNId9nMVbVa6xyq2HyxuJHXV+j6h5FGHmEXn9gIR7wGp8RhtPhKgVGLrHcbHZ5Th2E7eomz1T2NK/ezNP8ZhcwOj/lyGywlW0vhU798zpWhMf57k2OPeuMlfs8Y8y74epBdyBjsrMR4EDctF8RZR3vraxENiMJ6kk1gqKj04ir6HwL7blqwiybIFFnJrp2j7MzgjS4SQ687qMX5Zf5XT03aEE+9W9Epy73tT7zVQKdENCQlcm5
user: AgAdiOivMn0d+nYjYycMZz9QSiS/9QqwHPJQMHkE7/IOou+CJtBknlETNtdv84KZgBQTucufYqu3LR3djOBpdnQsYbIXDxPFgRZQ11pwu/sO2EGifDk218yyzzfZMvx1FL7JL4LI1rKoiHycZowCwsAjEtlICVOOYv1/Plki+6MHXiAGG4r/yUhugGx3VLLX+Poq8oaTeHndgSsFXJege8SfgYR4TsC7pQgsM1UQEFncGIhJYTD2ashmUxFJ+7CJjHqPR0lFRrZXmFvPwTYTCMT+tnSHnCFWtTht8cEi1NxA4kD/eKEX0rOol15EUZnFUws2WqWI634TbyGwZ7km/Yw4XoDxiQR4ar6ulkqb/djcc3cWDYE7PF1m1c+r3iog85S5CSfZ5EvdCHHrbPN9uO2gmoRQWiR5qI70YMxBSnkeLZWN05O1vUuopdXFDTafY7YskxLEdIGHGqFUpUrJZOvBB0zNBdHGgYxFzb5pNmMCC5LPlOuoKjV4yskh9Tgovz06aAvsPxn2WWx6NOJambeziKB5OmSKvPsFofViyGBekVAWSWtt9yJe6lu5OKpBEiA6xhGhQ4ZryTXu9wvVALuPSIwBFITv85sIxjJb80qhJ51wb12QgzLLcPby0HSanyBI1M4jfsXWpK8gIAbDNO+eD7z3PhD9Y/5hPqYKXZ37Geyq23xiyxG8XDj6cL+Ie6k8XipayI4=
password: AgAwMLnsYN1y8JQSqgGQbNG/8jKensTDsEw6ogITdkhDRlJcg8HQ5t7a6xLzNCrLHLJiQW8YOoyLT4lvFkBRMOa2EYcrDvBiRD0PjygWLIscKa7dA+jpAUf/icD9zsiDnTym2yf+VUANcmEgE6DiNvlcsrcmYqiR4pKVUTDlKPNOjOpTJ3nXETb3/sbt69E0JSGwtkvusYQSXKLU9KLbciihv+ycdkdlC9xy9myd4+vYZYXSh/eAvyZeb/hsmdSX7yaASmupMvet6Qsdt99PNzFQxtbQH+LQvYalVZ8bjWZQvCN/p0bA4H15otKBfe8rtEwVthgvyEvo6TK0Mg0pFY/b3AOGFmImnT3rDmgG6S8KTZH0Jce17ksFqvELQmHjqHuYpQsPDl44glM8kWRJ9Mf/Z424LRwZlJNVcOkuVl4qFqPUjzd2rWIyF0RaD0BE012C0ThJxKn2l17lVJbNtdUiR3qNpW01ot2m0CgKd2kXbjDmgRgAll4WgrukfCIn9ZnE0gVCFLJuK3MOQAaipFYy/bDO0izwl9T8nldgcI8OfiC3NTk2O+Es5jJRXu0oJGaC3HrTB7wXiwOoELvAsxLTPxKBiN9mCHCMtZX0PEtrio0dFRQ6Pi5xPng0KVT0I9dvGNsPdhPETNOB913WEvbgP8Gt3cj016nCzk51eUsYbXPpNL2B4kmbIhecqW/8kwKQPwYjVlBSXj3NxjzwMY6PvOl1
user: AgBqmjCYGMqy5zBE+vhtsynOvhWdHWDJDyl1D+laBtLjXTJwzRbNTdunHYo1ekwyqQ6Cr5pi4YMiLxAl1LIHF+Lfsp2QlY+ResAGzp9WgSBtNQDX3EmLDQofeWxMUDdMtMsE9wiKLCfNGDkRDsGquXTz+YFq03m1vH9cB8Bp+1ClWOTui+/Ce0MZlWsJZX1W8WXH7XTirtwUo0s53pc4AplUUH97ZEK3KSIxWa3gLCn0sAPDDLPX+JVA2xtpMq1XuVFiFifjzEtG2h0dejiF35FtSAR+rR4YmEfimk3QpRDfOqV5QUxvjCG+dTV49upSevF2mvbHW+o+lB6vEc6l9cZXvlbnMdaep3NmOsJcJ8wQIdFpFK4iVzFOTKSEbzLPlZ/J+sjS5vDXsfthorIO2faMA1iIf+I663zNxQU5btaK4TNYOZQlrFVjAmioRLkDhGZ6tDUPX/zMv+Crt+0HCwyEyhmvFZckDvezTZrxARSXXMKBVcvjHCyUNkz7ubZRiMU0PGM7fYuHr659e+XMRvj+LFA68ZaEIzCQpCFJenWWYAXgUdRG4LQ1LP2MwvRHpkOYSoRkHIpX7jOfhX82A60h/ta/CdbWifqNyL9OecvE3FKsZu/Kr0taw9W6nm6FBhQLgFkOnFrqp9dWnxfHruXuDBgcn0iE8nR7Ht2zS7hfQPeR4a3Y0xK3Plqbzdrb9HKnWQQhf14=
template:
metadata:
creationTimestamp: null

View File

@@ -1,25 +0,0 @@
apiVersion: v1
kind: PersistentVolume
metadata:
name: grafana-nfs
spec:
capacity:
storage: "1Gi"
accessModes:
- ReadWriteOnce
nfs:
path: /export/kluster/grafana
server: 192.168.1.157
---
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
name: grafana-nfs
spec:
storageClassName: ""
accessModes:
- ReadWriteOnce
resources:
requests:
storage: "1Gi"
volumeName: grafana-nfs

View File

@@ -31,7 +31,7 @@ datasources:
datasources:
- name: Thanos
type: prometheus
url: http://thanos-querier.prometheus.svc:9090
url: http://thanos-querier.prometheus.svc:10902
isDefault: true
- name: Prometheus
type: prometheus

View File

@@ -1,25 +0,0 @@
apiVersion: v1
kind: PersistentVolume
metadata:
name: influxdb-nfs
spec:
capacity:
storage: "10Gi"
accessModes:
- ReadWriteOnce
nfs:
path: /export/kluster/influxdb
server: 192.168.1.157
---
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
name: influxdb-nfs
spec:
storageClassName: ""
accessModes:
- ReadWriteOnce
resources:
requests:
storage: "10Gi"
volumeName: influxdb-nfs

View File

@@ -1,26 +0,0 @@
## Create default user through docker entrypoint
## Defaults indicated below
##
adminUser:
organization: "influxdata"
bucket: "default"
user: "admin"
retention_policy: "0s"
## Leave empty to generate a random password and token.
## Or fill any of these values to use fixed values.
password: ""
token: ""
## Persist data to a persistent volume
##
persistence:
enabled: true
## If true will use an existing PVC instead of creating one
useExisting: true
## Name of existing PVC to be used in the influx deployment
name: influxdb-nfs
ingress:
enabled: false

View File

@@ -5,16 +5,16 @@ namespace: monitoring
resources:
- namespace.yaml
- grafana.pvc.yaml
# - influxdb.pvc.yaml
- grafana.ingress.yaml
- grafana-admin.sealedsecret.yaml
- dashboards/
# grafana dashboards are provisioned from a git repository
# in the initial bootstrap of the app of apps, the git repo won't be available, so this sync will initially fail
- https://git.kluster.moll.re/remoll/grafana-dashboards//?timeout=10&ref=main
helmCharts:
- releaseName: grafana
name: grafana
repo: https://grafana.github.io/helm-charts
version: 7.3.9
version: 8.5.1
valuesFile: grafana.values.yaml

View File

@@ -1,52 +0,0 @@
env:
- name: HOSTNAME
value: "telegraf-speedtest"
service:
enabled: false
rbac:
# Specifies whether RBAC resources should be created
create: false
serviceAccount:
# Specifies whether a ServiceAccount should be created
create: false
## Exposed telegraf configuration
## For full list of possible values see `/docs/all-config-values.yaml` and `/docs/all-config-values.toml`
## ref: https://docs.influxdata.com/telegraf/v1.1/administration/configuration/
config:
agent:
interval: "2h"
round_interval: true
metric_batch_size: 1000
metric_buffer_limit: 10000
collection_jitter: "0s"
flush_interval: "10s"
flush_jitter: "0s"
precision: ""
debug: false
quiet: false
logfile: ""
hostname: "$HOSTNAME"
omit_hostname: false
processors:
- enum:
mapping:
field: "status"
dest: "status_code"
value_mappings:
healthy: 1
problem: 2
critical: 3
outputs:
- influxdb_v2:
urls:
- "http://influxdb-influxdb2.monitoring:80"
token: We64mk4L4bqYCL77x3fAUSYfOse9Kktyf2eBLyrryG9c3-y8PQFiKPIh9EvSWuq78QSQz6hUcsm7XSFR2Zj1MA==
organization: "influxdata"
bucket: "homeassistant"
inputs:
- internet_speed:
enable_file_download: false

View File

@@ -13,4 +13,4 @@ resources:
images:
- name: binwiederhier/ntfy
newName: binwiederhier/ntfy
newTag: v2.10.0
newTag: v2.11.0

View File

@@ -12,5 +12,5 @@ resources:
images:
- name: mealie
newTag: v1.6.0
newTag: v1.12.0
newName: ghcr.io/mealie-recipes/mealie

106
apps/steam/deployment.yaml Normal file
View File

@@ -0,0 +1,106 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: steam-headless
spec:
replicas: 1
selector:
matchLabels:
app: steam-headless
template:
metadata:
labels:
app: steam-headless
spec:
hostNetwork: true
securityContext:
fsGroup: 1000
nodeSelector:
gpu: full
containers:
- name: steam-headless
securityContext:
privileged: true
image: josh5/steam-headless:latest
resources: #Change CPU and Memory below
requests:
memory: "4G"
cpu: "1"
limits:
memory: "12G"
cpu: "4"
# set nodeSelector to the node label that matches the node you want to run the pod on
volumeMounts:
- name: home-dir
mountPath: /home/default/
- name: games-dir
mountPath: /mnt/games/
- name: input-devices
mountPath: /dev/input/
- name: dshm
mountPath: /dev/shm
- name: dri
mountPath: /dev/dri/
env: #Environmental Vars
- name: NAME
value: 'SteamHeadless'
- name: TZ
value: 'Europe/Zurich'
- name: USER_LOCALES
value: 'en_US.UTF-8 UTF-8'
- name: DISPLAY
value: ':55'
- name: SHM_SIZE
value: '2G'
- name: PUID
value: '1000'
- name: PGID
value: '1000'
- name: UMASK
value: '000'
- name: USER_PASSWORD
value: 'password' #changeme
- name: MODE
value: 'primary'
- name: WEB_UI_MODE
value: 'vnc'
- name: ENABLE_VNC_AUDIO
value: 'false'
- name: PORT_NOVNC_WEB
value: '8083'
- name: ENABLE_SUNSHINE
value: 'true'
- name: SUNSHINE_USER
value: 'sam'
- name: SUNSHINE_PASS
value: 'password'
- name: ENABLE_EVDEV_INPUTS
value: 'false'
ports:
# novnc
- containerPort: 8083
# moonlight webui
- containerPort: 47990
# moonlight stream
- containerPort: 47989
- containerPort: 47984
- containerPort: 48010
- containerPort: 47998
- containerPort: 47999
- containerPort: 47800
volumes:
- name: home-dir
persistentVolumeClaim:
claimName: home
- name: games-dir
persistentVolumeClaim:
claimName: games
- name: input-devices
hostPath:
path: /dev/input/
- name: dri
hostPath:
path: /dev/dri/
- name: dshm
emptyDir:
medium: Memory

View File

@@ -0,0 +1,12 @@
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
namespace: steam
resources:
- namespace.yaml
- deployment.yaml
- service.yaml
- pvc.yaml

View File

@@ -0,0 +1,6 @@
apiVersion: v1
kind: Namespace
metadata:
name: placeholder
labels:
pod-security.kubernetes.io/enforce: privileged

23
apps/steam/pvc.yaml Normal file
View File

@@ -0,0 +1,23 @@
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
name: games
spec:
storageClassName: "nfs-client"
accessModes:
- ReadWriteOnce
resources:
requests:
storage: "25Gi"
---
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
name: home
spec:
storageClassName: "nfs-client"
accessModes:
- ReadWriteOnce
resources:
requests:
storage: "5Gi"

38
apps/steam/service.yaml Normal file
View File

@@ -0,0 +1,38 @@
apiVersion: v1
kind: Service
metadata:
name: steam-vnc
spec:
selector:
app: steam-headless
ports:
- port: 8083
targetPort: 8083
name: novnc
- port: 47990
targetPort: 47990
name: moonlight-web
- port: 47989
targetPort: 47989
name: moonlight0
- port: 47984
targetPort: 47984
name: moonlight1
- port: 48010
targetPort: 48010
name: moonlight2
protocol: UDP
- port: 47998
targetPort: 47998
name: moonlight3
protocol: UDP
- port: 47999
targetPort: 47999
name: moonlight4
protocol: UDP
- port: 47800
targetPort: 47800
name: moonlight5
protocol: UDP
type: LoadBalancer
loadBalancerIP: 192.168.3.5

View File

@@ -0,0 +1,19 @@
---
apiVersion: bitnami.com/v1alpha1
kind: SealedSecret
metadata:
creationTimestamp: null
name: backblaze-credentials
namespace: backup
spec:
encryptedData:
bucket-id: AgBwwjlkGjskxMXXpXrnfcT9fJGgDXtbOO/6WcpqsX0exoADw31dADjLTHztiddsGYipiGFf2DBWge69UEnL04NXIzh/xTwWtWaqlz6yOJm/89FMQE1mfbrrLc7tk98TO3oS8i+IDAnkUiYyvDXJexgJg56QLY595PXkpplYit2bAk43mAB02yUZAK0gMs3KRDIvhHFsMq8Uiqx78En5KGGXwEg6KbVDyNvI2k8suEyy+C0yNO/M6dlczoUQiIJbllQzbqIzuxbOp609PfvGFAYHuPlz1kwsg+feZJ3kNsHYi4hWvpd64BWb30iO9J3dAYfW6d7C61t3S5uabmnd9E7bMYZA/OppD8SCknBFalXF91BUiJao9qBVd/BB7TCZOzhdzhxTW+FhgARcA+GIfg+nIzgBqHfAfQQmAOO2RZnsWrvMysyZaUpODvU8kSsLWZJ3CESVRVU3BHmJZpyxjX/s6QEXShQXZaLq54zoFJULZU/kbom5yFNNDWW5sKbURPvbKJcf/J9QabY5toO4yOwDk96Sr/FI+CHHvMh8/amigva0Upq6naiTHXMf4BR6+w3VKP5ALn5cbD5jG7EpUA/j1roMoLn68GMAtTJDLvSq2BGeJENWrUpOmjWZHDKZy8DEKorJk/Wbp46ksteSALE8eXpi4DRKXYDPvDb57EhzoJGQ9NMXgAvU9+1vw2nyTZE4gAWKpg0JkiHglu4Om79HfuGuewzJXlY=
key-id: AgBe4Iytjw9CkT7CqqNLHWyG4F8F+R6m8W1fnQZdGJvLy7D81+nB2ZDMCUZgUQV/mppGnXCkSxHyelcyTYCswQ9bD8hZsAIiQACq39v2UFxdORFEgNMj4bTWPo65fwhSK2giozPqN/4lzPopP91uyq1Z0gQaiqn/HDtbfNjq+Nu9kPmV06O1NUj1f7QqvfsMK+Xadv0G+DAA+ClGaq1q3fZPDkl2MSDdbEPGq+fLPK2DSdYu9fr1bc9TPyaU5JDFvGrCg8Nyigugi4wVGjQFtVwR1QsHAADmnoDAdXoj1Sz65QO5F+zNaDZWvnLnVFxAFrXJHihilc4ilAc8hSpE6YHQm7dGO5gGejLRNaaCspb/fPJD1g2XlnkckhFCSwd9sHNrXb07BZk8gFTHfndEC0t2UyUNloYpXsfap6fvehPK91ey35jbxDk9zYdgKZ7bz/tY0450CkofSbhisf2DwS3prt5YCEDaXasrpUqlk4dSbmLaGm6aee8lM5VpO8qYE56nvXY7qr0yB6z/NGWuLX3oH3fV+C8hH1P4mxDjvVEFdtewlF50Bf9WFoE7KpboSvmChZhBfjOkbtsGmQE41ZuNoYVupetXn6IfvYo6MzGoG6dTRVpJ5S2KLQDtsUljQfXJDFCSYwo87DD2dGBEn/z9GFCIPAYNO2ewzU4RUgcXPuDD4I2tdNIC+xdEBZq7BaWn/46Yfc1+FAWUA9VWEL/9kz5tK6hFD3Ww
key-secret: AgBcKSHdXHeNBzkZRtbaOEZra7AAWVlzmubaQoklECr14gKNL7rTReqX87qQObjQjmGKXtnJlKXIVHGDuiuHGkqfxQ9PCxccvpA3/7LdbFZnZtlFDWpAv+VB6Tp7H7Quho/GeAo8u6de0BXz85lz7+RyDCssBpuuzpchMgOlcEmhhfgQM5E6ye7bD6LpAZWcay3PV6FW2xTrJvLobpCcJordye6iTdSySPKdk6zflkon9h1KuQT+njmW4cfTQg/u7iS/NDQYcHdCpDHRLCor4GkVmi7NW8q+WuYhUSGWBy55SGvcUobhUL7GEHFJZpKmyrBOwSbwiWUDoN+NjI2TR5xvG0Ldjd/Hj32Vk29I+xSnj/O7pZj5ho35qExlZ/WCe42i0VHjzHFbOoU1MkqB+Skm24L1cLufhyNBtA8NNN3GWZhkcozpe164gpx4H/Vfe0UyzxUn4VJIws/IXYiLb4DgDkGrV+wzigN2QfSgTgs6syQkSs4UJ4gUZeN0jsyq0YHIhq1VZ8qPtLH310d8LZLxpTjZdO0obBwJfnHkg3blwSABEt5756C5DvjKmvO1pjG+JX/PJ0yAINL9Sc+FsY7TnGlItVzD830NcZ3Gg9C4Tg4xBEHybUWCSl1rJjwMvmUvVKNcIzLBHPAOyle1VLTZ37zb13MnhwNwdUtBu7+RZTy9wVO26iqemXTtFVj13kgZkJsyLjM6bo2y2wvFmjBCV9EKQtm87ROStM7iKB46
repository-string: AgAEYSqT6VR4OKW9/ZDgjYV+rm4tK3uucZsQG2u7W+8rRO0Rowkuba1AbybjgTE+G3q8si/GtlgsB1J8suvztHcVLNxg0y0Olb40pn2sZjKd85Q8AzsM0pFMkzme1lvnwu1Bcgd6Ck+FCr4CuUnh+UvJM5iWhAoSHJtdBb/EKw2F1BhewAqMXSX4oWM7V/T8RTtW2wBUk4wgX4ia+gCBPMpTo6i00ZtSpRB3Ub9VfGJfRmiXZA7oh5j3yN9nJlXonbJYBp4DNWod76CF7s35HBnSzS7YfIV80R4lH4xAPgRbZ0tvPWkTLMhSBX8rJCEnxT7DjL2lS7WfyboLdL4Uy8WmP7n2difZSr7p2iic6SCP44YgQ8JY5UgXh2QZENQ6oLvjK/PpFTjQ4bfw3E1/dakg1EdCYc/6DPyK3YekccUe/pXvVBrazpgObxXWeKKT6RMDeYWLUXTBHWhJ5OaJHHePD5t9IM65FlFTtkuUFbxExTi/u+RczBlWWcy10Yow3I3LGrDPJ/PfBy4RPfHCeQDQ+WoLJkNT20nu5mBXEYYKgNvgdn4yogifSiNG8vsNpo4dO89aHppbnHdmdp7F9asfbn27btEoFoHzIFku/mEnY5srs+Smrhhzy0+iPKge9LOuW33Gi4oJban1UYFvLAjq8YsZGkbIO82r2p+v15UjwU3girWPl1KBUc8WzLJtnqBvRPWS1ul6/X9JIdmmrHJjC0KlcGiVMU/Ls9ljnQj7dgLXuDW7JzuK3MhTWV3Y2kS39ze3TskCKcbL
template:
metadata:
creationTimestamp: null
name: backblaze-credentials
namespace: backup
type: Opaque

View File

@@ -46,14 +46,27 @@ spec:
name: backup-nfs-access
env:
- name: RESTIC_REPOSITORY
value: rest:http://rclone-gcloud:8000/kluster
# lives in the same namespace
# secrets live in the same namespace as per kustomization.yaml
- name: RESTIC_PASSWORD
valueFrom:
secretKeyRef:
name: restic-gdrive-credentials
key: restic-password
- name: RESTIC_REPOSITORY
valueFrom:
secretKeyRef:
name: backblaze-credentials
key: repository-string
- name: AWS_ACCESS_KEY_ID
valueFrom:
secretKeyRef:
name: backblaze-credentials
key: key-id
- name: AWS_ACCESS_KEY
valueFrom:
secretKeyRef:
name: backblaze-credentials
key: key-secret
volumes:
- name: backup-nfs-access
persistentVolumeClaim:

View File

@@ -17,10 +17,12 @@ spec:
# RESTIC_ARGS Can be for instance: --verbose --dry-run
# RESTIC_REPOSITORY is set in the secret
- >-
restic unlock
&&
restic forget
-r $(RESTIC_REPOSITORY)
--verbose=2
--keep-daily 7 --keep-weekly 5
--keep-daily 7 --keep-weekly 10
--prune
containers:
- name: ntfy-command-send

View File

@@ -8,7 +8,6 @@ resources:
- namespace.yaml
- pvc.yaml
- restic-password.sealedsecret.yaml
- rclone-config.sealedsecret.yaml
- rclone-gcloud.deployment.yaml
- backblaze-credentials.sealedsecret.yaml
- cronjobs-overlays/prune/
- cronjobs-overlays/backup/

View File

@@ -11,8 +11,8 @@ resources:
images:
- name: octodns
newName: octodns/octodns # has all plugins
newTag: "2024.04"
newTag: "2024.08"
- name: git
newName: alpine/git
newTag: "2.43.0"
newTag: "v2.45.2"

View File

@@ -0,0 +1,31 @@
# Using gitea actions
The actions deployment allows to use gitea actions from repositories within this instance.
### Building docker images
Docker builds use the kubernetes runner to build the images. For this to work, the pipeline needs to be able to access the kube-api. A service-account is created for this purpose.
To use the correct docker builder use the following action
```yaml
...
- name: Create Kubeconfig
run: |
mkdir $HOME/.kube
echo "${{ secrets.BUILDX_KUBECONFIG }}" > $HOME/.kube/config
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
with:
driver: kubernetes
driver-opts: |
namespace=act-runner
qemu.install=true
...
- name: Build and push
uses: docker/build-push-action@v5
with:
context: .
<other config>
```

View File

@@ -1,24 +1,23 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: actions-runner
labels:
app: act-runner
name: act-runner
spec:
replicas: 1
selector:
matchLabels:
app: actions-runner
app: act-runner
template:
metadata:
labels:
app: actions-runner
app: act-runner
spec:
hostname: kube-runner
restartPolicy: Always
containers:
- name: actions-runner
image: actions-runner
resources:
requests:
memory: "128Mi"
cpu: "500m"
- name: runner
image: vegardit/gitea-act-runner:dind-latest
env:
- name: GITEA_INSTANCE_URL
value: "https://git.kluster.moll.re"
@@ -27,5 +26,35 @@ spec:
secretKeyRef:
name: actions-runner-secret
key: runner-token
- name: GITEA_RUNNER_LABELS
value: k8s
- name: ACTIONS_RUNNER_POD_NAME
valueFrom:
fieldRef:
fieldPath: metadata.name
- name: GITEA_RUNNER_UID
value: '1000'
- name: GITEA_RUNNER_GID
value: '1000'
- name: GITEA_RUNNER_JOB_CONTAINER_PRIVILEGED
value: 'true'
securityContext:
privileged: true
volumeMounts:
- name: runner-data
mountPath: /data
volumes:
- name: runner-data
persistentVolumeClaim:
claimName: runner-data
---
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
name: runner-data
spec:
resources:
requests:
storage: 5Gi
storageClassName: "nfs-client"
volumeMode: Filesystem
accessModes:
- ReadWriteMany

View File

@@ -7,7 +7,7 @@ metadata:
namespace: gitea
spec:
encryptedData:
runner-token: AgBHwek/Aj/0oOnI/bnZ4FgtRoeJw4tIKvcDzBhaPdQ7bMVHyHUKYUNP7lkPgZrIN+7rhMY7C/j13iGWx4iTdhTgipLiJvyZ70pXKLSix4IpcypJTElggWkW0JW79x1HyJfBtn9iJiHnEZXPi7sEnyKhA0asAOR0ae8NS6mxxei0TIImaPaC2RHL6MOi40xsXpHz2ZaVhDQaTSRWjv0U6+WkCGcueqM2HLYfF1gqqkzGCjjhdOTK1CKvIvApZ5n8x6x94IiywCXJraDCwLz+acF2c2vA/Jb/3p7TwyyRZ5uIF5LZufhTJ6+5sFJSReHYxO4CpPA8KvM880vtiEjN7LxVo/Jruj2459OvjviKZS03ZwLHHrjanom1+HA9Sx2ffRLiR5ayGkfj/6kvpIRt5x1F7BbPp+a0LXuxJX+1nGDyEa1D1WzVKvZASav6/v7cXcom/nKGO91Zb8qHlOv7ZTs5guGQ9G9VCOHOG8szwpW3ZmQwWfFoWsShzqbDqszBYOGeIjIiDllLzTZ8A9dv9J2ELngZ1IPGIkfpQNEW8hsbNXTYhdVIrkh7BIFkRWfYDNWxqZd4iE6XllQcT1rqndusgiNEJX2r+P4nT8dPewATXQ79wzvZU3kB+VHzM8cLymlVGADi7v/qTY9RcrhuE0oMLzHRShr6JU05VfLGbMsttrYKmW7smvBp3lRJitO5A8+r8cRniS1+Xr8mIx87vCvnoWSH6BKkl9pCdDeCGylAWfkJN9UpkaKg
runner-token: AgAUU0jMe3bhoaOdqRZjRzvuQyRMagahDQtX2eqoJ78xihMPkL2yK5MZoCbcps2+xq2zSBgtdwA8xAMyVC4aKkeqYaPSlvBcvuGbcEsnGYJB1Fmjqn2CbvF4nbfaio+XMBmhZXW+GiPWmeiID6LhMwZghzVmcLEuqSmBJ0uB203j0wqsz/k9haL5zZ3vZRE0ofNFceDiVE55TrvTBiLQf1H6R9kFSaRRvcuCH8desX3OmkcSZ0PktULM7KElF9pX1gndrbwiEL5XK60KzE9URl2qpTK/mRrN88ZBa6IuX7u7M579yD3d7yS/JgYi2TL8s3Z69v8JF/nF1ha19xJFhEp1iiyS40xo8cuGHbfVzDSExbJ9fQMpG+1w8ZmyiARXT0EMjuz7tBSruKlr21R6lvwyri71Zg6cUKoVcmQlcmEW7Y6TkH4dsOGlpBX2KsLai7ObGgsQePZ7BHaMTEl54omtdsNsQaquElKhhhBVLEGGQgbP/YZ0wT244mgQkjuMLjVxAM1IWsu4THUY16F+bphzw4xYesZTYYCJUpNO3FDvcsyqlMgPlLMnO3CZyt+Y1avrfz/id5eJUxlVFx9y5htzXA1GaBgrnoRkrpv2OVRFIxatASGbbQgqcDIWx3VXfjVF32fnzVUNtiTZ+pvC/UcyAvFZmaZIrdbK42cA85O1FaOThHJg+8rpc4RXWOOiVg8+8BAQUd/c9bdPJeYLavDefaI5O9DZT4UqiQioBCET2yZPIhwm9JBT
template:
metadata:
creationTimestamp: null

View File

@@ -1,84 +0,0 @@
apiVersion: v1
kind: ServiceAccount
metadata:
name: drone-runner
---
kind: Role
apiVersion: rbac.authorization.k8s.io/v1
metadata:
name: drone-runner
rules:
- apiGroups:
- ""
resources:
- secrets
verbs:
- create
- delete
- apiGroups:
- ""
resources:
- pods
- pods/log
verbs:
- get
- create
- delete
- list
- watch
- update
---
kind: RoleBinding
apiVersion: rbac.authorization.k8s.io/v1
metadata:
name: drone-runner
subjects:
- kind: ServiceAccount
name: drone-runner
roleRef:
kind: Role
name: drone-runner
apiGroup: rbac.authorization.k8s.io
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: drone-runner
labels:
app.kubernetes.io/name: drone-runner
spec:
replicas: 1
selector:
matchLabels:
app.kubernetes.io/name: drone-runner
template:
metadata:
labels:
app.kubernetes.io/name: drone-runner
spec:
serviceAccountName: drone-runner
containers:
- name: runner
image: drone/drone-runner-kube:latest
ports:
- containerPort: 3000
env:
- name: DRONE_RPC_HOST
value: drone-server:80
- name: DRONE_RPC_PROTO
value: http
- name: DRONE_RPC_SECRET
valueFrom:
secretKeyRef:
name: drone-server-secret
key: rpc_secret
- name: DRONE_NAMESPACE_DEFAULT
value: gitea
# - name: DRONE_NAMESPACE_RULES
# value: "drone-runner:*"
- name: DRONE_SERVICE_ACCOUNT_DEFAULT
value: drone-runner

View File

@@ -1,117 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: drone-server
labels:
app: drone-server
spec:
replicas: 1
selector:
matchLabels:
app: drone-server
template:
metadata:
labels:
app: drone-server
spec:
containers:
- name: drone
image: drone/drone:latest
env:
- name: DRONE_SERVER_PORT # because the deployment is called drone-server, override this var again!
value: ":80"
- name: DRONE_GITEA_SERVER
value: https://git.kluster.moll.re
- name: DRONE_USER_CREATE
value: username:remoll,admin:true
- name: DRONE_GITEA_CLIENT_ID
valueFrom:
secretKeyRef:
name: drone-server-secret
key: client_id
- name: DRONE_GITEA_CLIENT_SECRET
valueFrom:
secretKeyRef:
name: drone-server-secret
key: client_secret
- name: DRONE_RPC_SECRET
valueFrom:
secretKeyRef:
name: drone-server-secret
key: rpc_secret
- name: DRONE_SERVER_HOST
value: drone.kluster.moll.re
- name: DRONE_SERVER_PROTO
value: https
resources:
requests:
memory: "1Gi"
cpu: 1.5
volumeMounts:
- mountPath: /data
name: drone-data-nfs
volumes:
- name: drone-data-nfs
persistentVolumeClaim:
claimName: drone-data-nfs
---
apiVersion: v1
kind: Service
metadata:
name: drone-server
labels:
app: drone-server
spec:
type: ClusterIP
ports:
- port: 80
name: http
selector:
app: drone-server
---
apiVersion: traefik.io/v1alpha1
kind: IngressRoute
metadata:
name: drone-server-ingress
spec:
entryPoints:
- websecure
routes:
- match: Host(`drone.kluster.moll.re`)
kind: Rule
services:
- name: drone-server
port: 80
tls:
certResolver: default-tls
---
apiVersion: v1
kind: PersistentVolume
metadata:
name: drone-data-nfs
spec:
capacity:
storage: "1Gi"
accessModes:
- ReadWriteOnce
nfs:
path: /export/kluster/drone
server: 192.168.1.157
---
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
name: drone-data-nfs
spec:
storageClassName: ""
accessModes:
- ReadWriteOnce
resources:
requests:
storage: "1Gi"
volumeName: drone-data-nfs

View File

@@ -1,23 +0,0 @@
{
"kind": "SealedSecret",
"apiVersion": "bitnami.com/v1alpha1",
"metadata": {
"name": "drone-server-secret",
"namespace": "gitea",
"creationTimestamp": null
},
"spec": {
"template": {
"metadata": {
"name": "drone-server-secret",
"namespace": "gitea",
"creationTimestamp": null
}
},
"encryptedData": {
"client_id": "AgA53a7kGJ6zZcx2ooTvTNwxaW2FvfzHJnxg6co54+HXinTJKsc4+GJ1PtdIbsZ7Dgu/sLi/4X90fT+PT2sgEx9jIilmHPdJeRtwV1UID3Y46A7cJlfcAKwNOFzp2PWvBvizbNp7tbJwxeAYnVX8GfN6fi700QxBGqAI3u8qQvLpU6UGW2RM96gCXI7s1QhE1Le6TgoESy5HX95pB7csDRNSwVE02OWfDHKEjH8QD8UvBB9xct6uwDfu7KrsJiNJvWMP6arvpfhy/X+UtCTFmj5wmFYL7oc6vSiCkq+QyHgQTEHTmGpEjEGKcQxPQaus3KhbhcxQBYLMEMYRlLPH0AEAA4dzbSpoVXM3LuIe9FppgrTCknK1uRB8wyrHUeInWO8mG7UraV6m5PUS+UYODMvfjwY3PyiGhTSf6LgMlhMl8e+2rb+OsWphT8Pbeom33PucrYaRFr9RpQkJSwE6HU3JEh25YLfIJ7caqRND8C/p8kD679C8UMcNpBN8WS4Cswn5jzmwbeJNM5DGp9yQVZNx7Bv3dHzx9i3ShjJ6QQnR/zWJZ/dWLy6weGYmdZMMXRAO8CCdruvcX5YyeieXZfchSIlZ/GqqBHptdcLpwLiZsfmyTWeBvk5pMAsZaKJ1tfWpQ84s4epzMoieTfhTueGXmeRKX+DJBBcriU+5YoqNxpU1lPL+LoInorJSKN7c3ouFx78N3GDOCq7mlWI94lY0bIs5zhrfUN137ITCcED62AJ7vks=",
"client_secret": "AgDQXU7x6RLhE9Hc+goeR2+3rW316SLLLA8tfqx3tsykL+vxhRkY5UCEaak3Rgei0k14jB/Rmme+/O/D1/5tc/i885+sGn0yjU7Jo4L5nkIssUOHlmRSGkRJDb9ABPauFXAjap9KLix9bd8ewI7R0lS3tOK9ZhThYhcfDUqV9qkkbSHzwNptkH7gYWt9qzG/rqqqpFP+PCtjzKVve4LCBgaxetcnh1t+d5oh7VAFnSI9Bt1G/DRzi+K3YZ+YG5+XKevBp06GMiLUMiv/eUvmOfAB/KO79LnNVbOcRsAHfnqLbXgNjFzspr5xDiGMC/ma1245LavywqXDp0S9jjNEe48i51PPQMwHWV8XEovsM6LHcteluNogt+VkL4mOnmP+sba/V3NO51rt1WXl+ca+U4kBq4dLMsdpWUKemz9BlIRC4etEXjwKJ5DznT7u6GUTrXx2RCm1j0OYWM++P10SdyD6tGjKnZf88a33Wrwm8Y7c47JrPTlP4PqLq9gzvD310uVfs1vGYGULaToGy+D/th8qiWWlu7BIfwqlIj8lruVnOhQ4GeEZmUAsqYf8JfsBwuDc0Y+8qbwjFrr2z+5x+2XBL8KGZVopyme45SHijlBZs7YsJqTBsg5oW09grM8/oO731GtzSYmpat2VZlaILuTjALqo/cu//kxwmqh7UX+jnTJ/2N3bKKSAfHWbHDeHeS2XJ+eKaI4onNYW9J70EfAP3vOpU+zmQ8rOzJuJjRt0HarLwzc5CXb1Xhlgsaoj7zKXPQMnqIDngg==",
"rpc_secret": "AgAcJNCFtOhK28vnLredkTgsVpnMPwaXss5NT5ysc0IbVid2vWRk2CTjBZc5DzjxxLwI1Ok88MFXHP08ZGCYy4rIbwoi7Ei1OEevGWfaI4n5CvAxr4ZamQHSfIX9dVAm9BSSx2M/mDtCKqVEGJEzyHCedrxf6LXM/YTNgjD43BuCZZMu35mRsHItpYFZQSttlHiUvR8y2YKrhV2P7fiWRD3cCVao8ldzKfGuvRfal8ByGoxpsYLj2D9CdtPvRF/TQsWUJJWwzbI9DmbW1MMI4/b26Jfa5TBvHxS1MQxFJpSXuMIengO+b0bi7WaR36y/FrKSNxIrQDHI7XCb00yYaSfj3RkSBVoAD0a2p8vNupHCqsKBoaWd8tMv/wGP8wbBk4DgGeQiTIvfhbQZU/Q2/LVDDficjXVn3IuKP/cqgGVf6lUh5YsUSs8qwpMil7XySiHvaZn+iFAnsXoejd4S2e/pbRvyaxP1aa7TCxnINjpU7IrnUEUiI4glQmAte3MqZWLXcc0Uk3Qz9PP0cD+V8qCOryrPMP2kTAI8LT/K4DgcEMAEGes4Vx1l0oBMF0xJvhM2kZXcEcf0NzuQJvYTgZpQF5xp0TchezLshmEUSIkII9NvAvn+iEYJeHsJUDijjmBloSYe4+QTgdYh6FakVUwYI5U4ztDNrvgqhWjExfbn8HxaFzsNTsuzGoYs+jwXH8Wk2z1Q1oQjDdO5YTjmdqvkSTdin/5CiuCDHaQX6a4gNQ=="
}
}
}

View File

@@ -119,7 +119,7 @@ gitea:
TYPE: level
indexer:
ISSUE_INDEXER_TYPE: bleve
REPO_INDEXER_ENABLED: true
REPO_INDEXER_ENABLED: false

View File

@@ -5,24 +5,18 @@ resources:
- gitea.pvc.yaml
- gitea.ingress.yaml
- gitea.servicemonitor.yaml
- drone-kube-runner.deployment.yaml
- drone-server.deployment.yaml
- drone-server.sealedsecret.yaml
- actions.deployment.yaml
- actions.sealedsecret.yaml
# - actions.rbac.yaml
namespace: gitea
images:
- name: actions-runner
newName: ghcr.io/christopherhx/gitea-actions-runner
newTag: v0.0.11
helmCharts:
- name: gitea
namespace: gitea # needs to be set explicitly for svc to be referenced correctly
releaseName: gitea
version: 10.1.4
version: 10.4.1
valuesFile: gitea.values.yaml
repo: https://dl.gitea.io/charts/

View File

@@ -2,3 +2,5 @@ apiVersion: v1
kind: Namespace
metadata:
name: placeholder
labels:
pod-security.kubernetes.io/enforce: privileged

View File

@@ -0,0 +1,28 @@
apiVersion: postgresql.cnpg.io/v1
kind: Cluster
metadata:
name: gitea-postgres
spec:
instances: 1
imageName: ghcr.io/tensorchord/cloudnative-pgvecto.rs:16.2
bootstrap:
initdb:
import:
type: monolith
databases:
# Persistent storage configuration
storage:
size: 10Gi
pvcTemplate:
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 10Gi
storageClassName: nfs-client
volumeMode: Filesystem
monitoring:
enablePodMonitor: true

View File

@@ -10,6 +10,6 @@ namespace: metallb-system
helmCharts:
- name: metallb
repo: https://metallb.github.io/metallb
version: 0.14.5
version: 0.14.8
releaseName: metallb
valuesFile: values.yaml

View File

@@ -2,3 +2,5 @@ apiVersion: v1
kind: Namespace
metadata:
name: placeholder
labels:
pod-security.kubernetes.io/enforce: privileged

View File

@@ -9,6 +9,6 @@ namespace: pg-ha
helmCharts:
- name: cloudnative-pg
releaseName: pg-controller
version: 0.21.0
version: 0.22.0
valuesFile: values.yaml
repo: https://cloudnative-pg.io/charts/

View File

@@ -17,4 +17,4 @@ resources:
images:
- name: thanos
newName: quay.io/thanos/thanos
newTag: v0.34.1
newTag: v0.36.1

View File

@@ -4,7 +4,7 @@ metadata:
name: prometheus
---
apiVersion: rbac.authorization.k8s.io/v1
knd: ClusterRole
kind: ClusterRole
metadata:
name: prometheus
rules:
@@ -52,26 +52,17 @@ spec:
requests:
memory: 400Mi
retention: 730d
retentionSize: 50Gi
retentionSize: 3GiB
serviceAccountName: prometheus
enableAdminAPI: false
serviceMonitorNamespaceSelector: {}
serviceMonitorSelector: {}
thanos:
version: v0.33.0
version: v0.34.1
objectStorageConfig:
# loads the config from a secret named thanos-objstore-config in the same namespace
key: thanos.yaml
name: thanos-objstore-config
volumeClaimTemplate:
metadata:
name: prometheus-data
spec:
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 50Gi
---
apiVersion: v1
kind: Service

View File

@@ -7,7 +7,7 @@ metadata:
namespace: prometheus
spec:
encryptedData:
thanos.yaml: AgAvZbqz5bYVzhbpfYXKoVHt+wKKuOI2C8pBCkBxuBcORoEf2f6eGVYS+aaojipQk8LaYP2yw/ffRmucKuAH0mVmZllRkVoZTOP7L1ciDmrOpbfrfPtoMJagFY90HKd1FZcI45C64MUYvqHCvF40Jk6rEii9aSmOPj9WyGqh5GrqCO24tt1Prk3o+ZS6Z2tlPRF1PdBpyZcQYYMnSuTgA4aCVuXQF/H5rR1luP85jzJAESp1VEV+xEOUY98ZLaA071fySndFZEn6YflfpixqNWFkloYENMfGL+x9jc/zbvbLyG4Q4FG5etz331GQe29OW7uIibXXGwdKO+a6Wd7a6I9ygA5+V8UdeT9mvB2AZRV6aY3aHifOgTTDDg/9YkXfUnmEwowhzsx7mtumxHOmHssFsWmRCX04wHz596/8/RyqmQrShCdGjtVkhVcEuJPPacC6awj9AZOQcW86uatZgnvnNJPlHjgSYFGxnHS89AWRgY68IF6h4Xhs8CpIcVoujK8Oi05vL3Ypi3g/r0iJ65tbYVo3LfyTnSk4pUgERJHGCPpBmiSo4DG+K9pA+cYyqjgZZGXaGKzLNCUDrBEWg5nY5gNMCyjTEeoQUurz2uPLD07Jme9KUd0a1TpoEsUOvfyyiM1DKW30XTKvIfm5m3voBatV+3oX1XfqyZEqkS7+VtWzqRqYAr0y5MEnBTpIMXQBa8M5SlVvcpaovKk1A6ZzNZExfF77o71v650axcLT3U+sxV8IIRS7y+BDRJGYiXaX0EFOIlIe+YDGFbf2k1kUi1kbhHBQEPCQkTgt79yTLEyXNAGrpNUsNYivWIQ+8GPM6+wDY9NjS2QLFKBt5sPe0VjqxzCeSoUY6rDgmR8rEtWpqOHWfuD5IuyCerzxmh0kMr9vVKjEjfJoSjES2MQRM5wOdVLHJSc=
thanos.yaml: AgByW/LKzPh0QeNsHR8Us4bJ/0chIQErhfh5plY1tjqiZyNLlxZ+NygYYzVggW02k4gAsKs68trbLBbeTTEhpKYP8hUphNb13lrgp07wYpOQjUF57i6RjPM2QNJpO0qLSk/nOPIOtR3XKn+nXxdJDmh3j5y0zxVz5O7MLh7adwOaHlyWTLMJjI1cda8YljDp2FYs24lHHMw4gXAYUecGDJNQqw5Xy9IiGh8kBbcKe3j6bVCj1yxPbHszmvZ2s+Q+mnndXnoeLMhwjZhMF8/PETxmSZ2bs41k3lHm/2rcPQCJsl9CuJEGKhu6ndKrVhtury4/US/FheEOoGF0YZk/AQMHII/mxy8haPNxtQTDs4rfYz/BA8cMMZll44wxOY9gAOmhm3sG6GI9wcB1Z65p98xSuDaInknO80l07vwMAAvmrZbT53Fmefrxl+jE1pImcGEsL0MfP621nTXlOBW9keF+6aUOubrwjPKKSXdqZU21acNbaIeRQSJyaOBStAKLfnPFmaryGisgNu0hCk/WmszZ0/s/ilvdMdAD6kKoiKL/NWfXtHATh/fnd76bKfSzNQk6e+WWfomToYVU0HRgAaWnIzjB9Q4tjxkbRwteEodU+K1BvD4xQ0sfQB2vHlDjQGC3pjIUFCWG0SzQGb7oe6+X2CJpcNIBHwF661iELJpJkg8dLsPtwb+8Rj6BL+ZtyVKYv18nDNON0WVpwJb/IHHSmxfYD5b/q6fATCFj55IXK5Nr4VO65a2Sv5Iv0/TTUVkwb8dkMmwfs5qcQiZ4oKWx8Ol6GkjDZrFARUtHQ/9KiZ9xDj3tPic2TeQfKr27sgc4lEL8RSxaRKHkkxIAioea3YgFfBm7ZfoxMlzJnQ1vI2vDvJcRXhWKSGdXiKOddwLSVMZFsSRRi9AxH87Sjt7j1wvsA7xgBqc=
template:
metadata:
creationTimestamp: null

View File

@@ -53,15 +53,3 @@ spec:
protocol: TCP
port: 10901
targetPort: grpc
metadata:
labels:
app: thanos-querier
name: thanos-querier
spec:
ports:
- port: 9090
protocol: TCP
targetPort: http
name: http
selector:
app: thanos-querier

View File

@@ -1,33 +1,32 @@
apiVersion: apps/v1
kind: StatefulSet
kind: Deployment
metadata:
name: thanos-store-gateway
name: thanos-store
labels:
app: thanos-store-gateway
app: thanos-store
spec:
replicas: 1
selector:
matchLabels:
app: thanos-store-gateway
serviceName: thanos-store-gateway
app: thanos-store
template:
metadata:
labels:
app: thanos-store-gateway
app: thanos-store
thanos-store-api: "true"
spec:
containers:
- name: thanos
image: thanos
args:
- "store"
- "--log.level=debug"
- "--data-dir=/data"
- "--grpc-address=0.0.0.0:10901"
- "--http-address=0.0.0.0:10902"
- "--objstore.config-file=/etc/secret/thanos.yaml"
- "--index-cache-size=500MB"
- "--chunk-pool-size=500MB"
- store
- --log.level=debug
- --data-dir=/data
- --grpc-address=0.0.0.0:10901
- --http-address=0.0.0.0:10902
- --objstore.config-file=/etc/secret/thanos.yaml
- --index-cache-size=500MB
- --chunk-pool-size=500MB
ports:
- name: http
containerPort: 10902
@@ -61,7 +60,6 @@ metadata:
app.kubernetes.io/name: thanos-store
name: thanos-store
spec:
clusterIP: None
ports:
- name: grpc
port: 10901
@@ -70,4 +68,4 @@ spec:
port: 10902
targetPort: 10902
selector:
app: thanos-store-gateway
app: thanos-store

View File

@@ -11,4 +11,4 @@ resources:
images:
- name: renovate/renovate
newName: renovate/renovate
newTag: "37"
newTag: "38"

View File

@@ -9,4 +9,4 @@ resources:
images:
- name: controller
newName: docker.io/bitnami/sealed-secrets-controller
newTag: 0.26.2
newTag: 0.27.1

View File

@@ -74,11 +74,13 @@ data:
address = ":9000"
[entryPoints.dnsovertls]
address = ":853"
address = ":8853"
# route dns over https to other pods but provide own certificate
[metrics]
[metrics.prometheus]
# metrics are enabled and scraping is ensured through a servicemonitor
entryPoint = "metrics"
addEntryPointsLabels = true
addServicesLabels = true

View File

@@ -5,14 +5,14 @@ resources:
- pvc.yaml
- configmap.yaml
- servicemonitor.yaml
- https://raw.githubusercontent.com/traefik/traefik/v2.11/docs/content/reference/dynamic-configuration/kubernetes-crd-definition-v1.yml
- https://raw.githubusercontent.com/traefik/traefik/v2.11/docs/content/reference/dynamic-configuration/kubernetes-crd-rbac.yml
- https://raw.githubusercontent.com/traefik/traefik/v3.0/docs/content/reference/dynamic-configuration/kubernetes-crd-definition-v1.yml
- https://raw.githubusercontent.com/traefik/traefik/v3.0/docs/content/reference/dynamic-configuration/kubernetes-crd-rbac.yml
namespace: traefik-system
helmCharts:
- name: traefik
releaseName: traefik
version: 27.0.2
version: 31.1.1
valuesFile: values.yaml
repo: https://traefik.github.io/charts

View File

@@ -2,3 +2,5 @@ apiVersion: v1
kind: Namespace
metadata:
name: placeholder
labels:
pod-security.kubernetes.io/enforce: privileged

View File

@@ -1,25 +1,11 @@
apiVersion: v1
kind: PersistentVolume
metadata:
name: traefik-certificate
spec:
capacity:
storage: "10Mi"
accessModes:
- ReadWriteOnce
nfs:
path: /export/kluster/traefik/certs
server: 192.168.1.157
---
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
name: traefik-certificate
name: certs
spec:
storageClassName: "nfs-client"
accessModes:
- ReadWriteOnce
resources:
requests:
storage: "10Mi"
volumeName: traefik-certificate
storageClassName: ""
storage: "50Mi"

View File

@@ -1,151 +0,0 @@
## Default values.yaml for Telegraf
## This is a YAML-formatted file.
## ref: https://hub.docker.com/r/library/telegraf/tags/
replicaCount: 1
image:
repo: "telegraf"
tag: "1.24"
pullPolicy: IfNotPresent
podAnnotations: {}
podLabels: {}
imagePullSecrets: []
## Configure args passed to Telegraf containers
args: []
# The name of a secret in the same kubernetes namespace which contains values to
# be added to the environment (must be manually created)
# This can be useful for auth tokens, etc.
# envFromSecret: "telegraf-tokens"
env:
- name: HOSTNAME
value: "telegraf-polling-service"
# An older "volumeMounts" key was previously added which will likely
# NOT WORK as you expect. Please use this newer configuration.
volumes:
- name: traefik-logs
persistentVolumeClaim:
claimName: traefik-logs
mountPoints:
- name: traefik-logs
mountPath: /traefik_logs
## Node labels for pod assignment
## ref: https://kubernetes.io/docs/user-guide/node-selection/
nodeSelector: {}
## Affinity for pod assignment
## Ref: https://kubernetes.io/docs/concepts/configuration/assign-pod-node/#affinity-and-anti-affinity
##
affinity: # to read the traefik logs the pod must be on the same node as traefik
podAffinity:
requiredDuringSchedulingIgnoredDuringExecution:
- labelSelector:
matchExpressions: # matches labels: app.kubernetes.io/name=traefik
- key: app.kubernetes.io/name
operator: In
values:
- traefik
topologyKey: "kubernetes.io/hostname"
## Tolerations for pod assignment
## Ref: https://kubernetes.io/docs/concepts/configuration/taint-and-toleration/
##
tolerations: []
# - key: "key"
# operator: "Equal|Exists"
# value: "value"
# effect: "NoSchedule|PreferNoSchedule|NoExecute(1.6 only)"
service:
enabled: false
type: ClusterIP
annotations: {}
rbac:
# Specifies whether RBAC resources should be created
create: true
# Create only for the release namespace or cluster wide (Role vs ClusterRole)
clusterWide: false
# Rules for the created rule
rules: []
# When using the prometheus input to scrape all pods you need extra rules set to the ClusterRole to be
# able to scan the pods for scraping labels. The following rules have been taken from:
# https://github.com/helm/charts/blob/master/stable/prometheus/templates/server-clusterrole.yaml#L8-L46
# - apiGroups:
# - ""
# resources:
# - nodes
# - nodes/proxy
# - nodes/metrics
# - services
# - endpoints
# - pods
# - ingresses
# - configmaps
# verbs:
# - get
# - list
# - watch
# - apiGroups:
# - "extensions"
# resources:
# - ingresses/status
# - ingresses
# verbs:
# - get
# - list
# - watch
# - nonResourceURLs:
# - "/metrics"
# verbs:
# - get
serviceAccount:
# Specifies whether a ServiceAccount should be created
create: true
# The name of the ServiceAccount to use.
# If not set and create is true, a name is generated using the fullname template
name:
# Annotations for the ServiceAccount
annotations: {}
## Exposed telegraf configuration
## For full list of possible values see `/docs/all-config-values.yaml` and `/docs/all-config-values.toml`
## ref: https://docs.influxdata.com/telegraf/v1.1/administration/configuration/
config:
agent:
interval: "10s"
round_interval: true
metric_batch_size: 1000
metric_buffer_limit: 10000
collection_jitter: "0s"
flush_interval: "10s"
flush_jitter: "0s"
precision: ""
debug: false
quiet: false
logfile: ""
hostname: "$HOSTNAME"
omit_hostname: true
# processors:
# - enum:
# mapping:
# field: "status"
# dest: "status_code"
# value_mappings:
# healthy: 1
# problem: 2
# critical: 3
outputs:
- influxdb_v2:
urls:
- "http://influxdb-influxdb2.monitoring:80"
token: N_jNm1hZTfyhJneTJj2G357mQ7EJdNzdvebjSJX6JkbyaXNup_IAqeYowblMgV8EjLypNvauTl27ewJvI_rbqQ==
organization: "influxdata"
bucket: "kluster"
# retention_policy: "2w"
inputs:
- docker_log:
endpoint: "unix:///var/run/docker.sock"
from_beginning: false
container_name_include: ["traefik"]

View File

@@ -7,60 +7,15 @@ deployment:
kind: Deployment
# Number of pods of the deployment (only applies when kind == Deployment)
replicas: 1
# Number of old history to retain to allow rollback (If not set, default Kubernetes value is set to 10)
# revisionHistoryLimit: 1
# Amount of time (in seconds) before Kubernetes will send the SIGKILL signal if Traefik does not shut down
terminationGracePeriodSeconds: 60
# The minimum number of seconds Traefik needs to be up and running before the DaemonSet/Deployment controller considers it available
minReadySeconds: 0
# Additional deployment annotations (e.g. for jaeger-operator sidecar injection)
annotations: {}
# Additional deployment labels (e.g. for filtering deployment by custom labels)
labels: {}
# Additional pod annotations (e.g. for mesh injection or prometheus scraping)
podAnnotations: {}
# Additional Pod labels (e.g. for filtering Pod by custom labels)
podLabels: {}
# Additional containers (e.g. for metric offloading sidecars)
additionalContainers: []
# https://docs.datadoghq.com/developers/dogstatsd/unix_socket/?tab=host
# - name: socat-proxy
# image: alpine/socat:1.0.5
# args: ["-s", "-u", "udp-recv:8125", "unix-sendto:/socket/socket"]
# volumeMounts:
# - name: dsdsocket
# mountPath: /socket
# Additional volumes available for use with initContainers and additionalContainers
additionalVolumes:
# - name: traefik-logs
# persistentVolumeClaim:
# claimName: traefik-logs
- name: traefik-certificate
- name: certs
persistentVolumeClaim:
claimName: traefik-certificate
claimName: certs
- name: traefik-config
configMap:
name: traefik-config
# - name: dsdsocket
# hostPath:
# path: /var/run/statsd-exporter
# Additional initContainers (e.g. for setting file permission as shown below)
initContainers: []
# The "volume-permissions" init container is required if you run into permission issues.
# Related issue: https://github.com/traefik/traefik/issues/6972
# - name: volume-permissions
# image: busybox:1.31.1
# command: ["sh", "-c", "chmod -Rv 600 /data/*"]
# volumeMounts:
# - name: data
# mountPath: /data
# Use process namespace sharing
shareProcessNamespace: false
# Custom pod DNS policy. Apply if `hostNetwork: true`
# dnsPolicy: ClusterFirstWithHostNet
# Additional imagePullSecrets
imagePullSecrets: []
# - name: myRegistryKeySecretName
# Use ingressClass. Ignored if Traefik version < 2.3 / kubernetes < 1.18.x
@@ -78,7 +33,7 @@ pilot:
# Toggle Pilot Dashboard
# dashboard: false
# Enable experimental features
# Enable experimental features
experimental:
http3:
enabled: false
@@ -99,11 +54,6 @@ experimental:
ingressRoute:
dashboard:
enabled: false
# Additional ingressRoute annotations (e.g. for kubernetes.io/ingress.class)
annotations: {}
# Additional ingressRoute labels (e.g. for filtering IngressRoute by custom labels)
labels: {}
#
@@ -114,65 +64,26 @@ providers:
enabled: true
allowCrossNamespace: false
allowExternalNameServices: true
allowEmptyServices: false
# ingressClass: traefik-internal
# labelSelector: environment=production,method=traefik
namespaces: []
# - "default"
kubernetesIngress:
enabled: true
allowExternalNameServices: true
allowEmptyServices: false
ingressClass: traefik
# labelSelector: environment=production,method=traefik
namespaces: []
# - "default"
# IP used for Kubernetes Ingress endpoints
publishedService:
enabled: false
# Published Kubernetes Service to copy status from. Format: namespace/servicename
# By default this Traefik service
# pathOverride: ""
# Add volumes to the traefik pod. The volume name will be passed to tpl.
# This can be used to mount a cert pair or a configmap that holds a config.toml file.
# After the volume has been mounted, add the configs into traefik by using the `additionalArguments` list below, eg:
# additionalArguments:
# - "--providers.file.filename=/config/dynamic.toml"
# - "--ping"
# - "--ping.entrypoint=web"
volumes: []
# - name: traefik-config
# mountPath: /config
# configMap:
# name: traefik-config
# Additional volumeMounts to add to the Traefik container
additionalVolumeMounts:
# - name: traefik-logs
# mountPath: /var/log/traefik
# nfs:
# server: 192.168.1.157
# path: /kluster/traefik
# # For instance when using a logshipper for access logs
# - name: traefik-logs
# # claimName: traefik-logs
# mountPath: /var/log/traefik
- name: traefik-certificate
- name: certs
# claimName: traefik-certificate
mountPath: /certs
- name: traefik-config
mountPath: /config
globalArguments:
additionalArguments:
- "--configfile=/config/traefik.toml"
additionalArguments: []
# Environment variables to be passed to Traefik's binary
env:
@@ -185,18 +96,13 @@ env:
ports:
# add a new one, the other ones are kept the same.
dnsovertls:
port: 853
expose: true
port: 8853
expose:
default: true
exposedPort: 853
protocol: TCP
envFrom: []
# - configMapRef:
# name: config-map-name
# - secretRef:
# name: secret-name
tlsOptions: {}
@@ -218,3 +124,4 @@ service:
spec:
# externalTrafficPolicy: Local
loadBalancerIP: 192.168.3.1

View File

@@ -1,18 +1,17 @@
apiVersion: argoproj.io/v1alpha1
kind: Application
metadata:
name: homepage-application
name: audiobookshelf-application
namespace: argocd
spec:
project: apps
source:
repoURL: ssh://git@git.kluster.moll.re:2222/remoll/k3s-infra.git
targetRevision: main
path: apps/homepage
path: apps/audiobookshelf
destination:
server: https://kubernetes.default.svc
namespace: homepage
namespace: audiobookshelf
syncPolicy:
automated:
prune: true

View File

@@ -0,0 +1,4 @@
apiVersion: kustomize.config.k8s.io/v1beta1
kind: Kustomization
resources:
- application.yaml

View File

@@ -24,14 +24,15 @@ resources:
# simple apps
- adguard/
- audiobookshelf/
- eth-physics/
- files/
- finance/
- homeassistant/
- homepage/application.yaml
- immich/
- journal/
- media/
- minecraft/application.yaml
- monitoring/
- ntfy/
- recipes/

View File

@@ -0,0 +1,18 @@
apiVersion: argoproj.io/v1alpha1
kind: Application
metadata:
name: minecraft-application
namespace: argocd
spec:
project: apps
source:
repoURL: ssh://git@git.kluster.moll.re:2222/remoll/k3s-infra.git
targetRevision: main
path: apps/minecraft
destination:
server: https://kubernetes.default.svc
namespace: minecraft
syncPolicy:
automated:
prune: true
selfHeal: false