Compare commits

1 Commit

SHA1        Message                                                   Date
183447835a  chore(deps): update helm release manticoresearch to v7   2025-02-15 03:07:21 +00:00
15 changed files with 62 additions and 42 deletions

View File

@@ -7,8 +7,8 @@ pre_control_plane_upgrade_cluster() {
kubectl label node $n 'node.kubernetes.io/kubezero.version=v1.30.6' || true
done
# patch aws-iam-authenticator DS to NOT run pods on 1.31 controllers
kubectl patch ds aws-iam-authenticator -n kube-system -p '{"spec": {"template": {"spec": {"nodeSelector": {"node.kubernetes.io/kubezero.version": "v1.30.6"}}}}}' || true
# patch aws-iam-authentiator DS to NOT run pods on 1.31 controllers
kubectl patch ds aws-iam-authentiator -p '{"spec": {"template": {"spec": {"nodeSelector": {"node.kubernetes.io/kubezero.version": "v1.30.6"}}}}}' || true
}
@@ -22,12 +22,12 @@ post_control_plane_upgrade_cluster() {
pre_cluster_upgrade_final() {
if [ "$PLATFORM" == "aws" ];then
# cleanup aws-iam-authenticator
kubectl delete clusterrolebinding aws-iam-authenticator || true
kubectl delete clusterrole aws-iam-authenticator || true
kubectl delete serviceaccount aws-iam-authenticator -n kube-system || true
kubectl delete cm aws-iam-authenticator -n kube-system || true
kubectl delete ds aws-iam-authenticator -n kube-system || true
# cleanup aws-iam-authentiator
kubectl delete clusterrolebinding aws-iam-authentiator || true
kubectl delete clusterrole aws-iam-authentiator || true
kubectl delete serviceaccount aws-iam-authentiator -n kube-system || true
kubectl delete cm aws-iam-authentiator -n kube-system || true
kubectl delete ds aws-iam-authentiator -n kube-system || true
kubectl delete IAMIdentityMapping kubezero-worker-nodes || true
kubectl delete IAMIdentityMapping kubernetes-admin || true
kubectl delete crd iamidentitymappings.iamauthenticator.k8s.aws || true

View File

@@ -97,7 +97,6 @@ pre_kubeadm() {
cp -r ${WORKDIR}/kubeadm/templates/apiserver ${HOSTFS}/etc/kubernetes
# copy patches to host to make --rootfs of kubeadm work
rm -f ${HOSTFS}/etc/kubernetes/patches/*
cp -r ${WORKDIR}/kubeadm/templates/patches ${HOSTFS}/etc/kubernetes
}
@@ -133,7 +132,10 @@ control_plane_upgrade() {
kubectl get application kubezero -n argocd -o yaml | \
yq ".spec.source.helm.valuesObject |= load(\"$WORKDIR/kubezero-values.yaml\") | .spec.source.targetRevision = strenv(kubezero_chart_version)" \
> $WORKDIR/new-argocd-app.yaml
kubectl replace -f $WORKDIR/new-argocd-app.yaml
kubectl apply --server-side --force-conflicts -f $WORKDIR/new-argocd-app.yaml
# finally remove annotation to allow argo to sync again
kubectl patch app kubezero -n argocd --type json -p='[{"op": "remove", "path": "/metadata/annotations"}]' || true
fi
pre_kubeadm

View File

@@ -8,13 +8,6 @@ import yaml
def migrate(values):
"""Actual changes here"""
# remove syncOptions from root app
try:
if values["kubezero"]["syncPolicy"]:
values["kubezero"].pop("syncPolicy")
except KeyError:
pass
return values

View File

@@ -2,7 +2,7 @@ apiVersion: v2
name: kubeadm
description: KubeZero Kubeadm cluster config
type: application
version: 1.31.6
version: 1.31.5
home: https://kubezero.com
icon: https://cdn.zero-downtime.net/assets/kubezero/logo-small-64.png
keywords:
@@ -11,4 +11,4 @@ keywords:
maintainers:
- name: Stefan Reimer
email: stefan@zero-downtime.net
kubeVersion: ">= 1.31.0-0"
kubeVersion: ">= 1.26.0"

View File

@@ -109,7 +109,6 @@ argo-cd:
controller.status.processors: "10"
controller.operation.processors: "5"
controller.diff.server.side: "true"
controller.sync.timeout.seconds: "1800"
server.insecure: true
server.enable.gzip: true
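
Should a cluster still need the longer controller sync timeout dropped in this hunk, it can presumably be restored through the upstream argo-cd chart's params map; a minimal sketch, assuming the standard configs.params layout of that chart:

    argo-cd:
      configs:
        params:
          # hypothetical override restoring the 30-minute controller sync timeout
          controller.sync.timeout.seconds: "1800"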

View File

@@ -18,7 +18,7 @@
"subdir": "contrib/mixin"
}
},
"version": "eb7607bd8b3665d14aa40d50435ae8c9002d620c",
"version": "8c52b414f324d6369b77096af98d8f0416fe20cb",
"sum": "XmXkOCriQIZmXwlIIFhqlJMa0e6qGWdxZD+ZDYaN0Po="
},
{
@@ -78,7 +78,7 @@
"subdir": "grafana-builder"
}
},
"version": "ef841d571a704013b689368fe51e437810b6c935",
"version": "393630ca7ba9b25258c098f1fd4c81962e3ca046",
"sum": "yxqWcq/N3E/a/XreeU6EuE6X7kYPnG0AspAQFKOjASo="
},
{
@@ -128,7 +128,7 @@
"subdir": "jsonnet/kube-state-metrics"
}
},
"version": "350a7c472e1801a2e13b9895ec8ef38876c96dd0",
"version": "2a95d4649b2fea55799032fb9c0b571c4ba7f776",
"sum": "3bioG7CfTfY9zeu5xU4yon6Zt3kYvNkyl492nOhQxnM="
},
{
@@ -138,7 +138,7 @@
"subdir": "jsonnet/kube-state-metrics-mixin"
}
},
"version": "350a7c472e1801a2e13b9895ec8ef38876c96dd0",
"version": "2a95d4649b2fea55799032fb9c0b571c4ba7f776",
"sum": "qclI7LwucTjBef3PkGBkKxF0mfZPbHnn4rlNWKGtR4c="
},
{
@@ -190,8 +190,8 @@
"subdir": "docs/node-mixin"
}
},
"version": "02afa5c53c36123611533f2defea6ccd4546a9bb",
"sum": "8dNyJ4vpnKVBbCFN9YLsugp1IjlZjDCwdKMjKi0KTG4="
"version": "11365f97bef6cb0e6259d536a7e21c49e3f5c065",
"sum": "xYj6VYFT/eafsbleNlC+Z2VfLy1CndyYrJs9BcTmnX8="
},
{
"source": {

View File

@@ -42,7 +42,6 @@ Kubernetes: `>= 1.30.0-0`
| rabbitmq-cluster-operator.clusterOperator.metrics.enabled | bool | `false` | |
| rabbitmq-cluster-operator.clusterOperator.metrics.serviceMonitor.enabled | bool | `true` | |
| rabbitmq-cluster-operator.enabled | bool | `false` | |
| rabbitmq-cluster-operator.msgTopologyOperator.enabled | bool | `false` | |
| rabbitmq-cluster-operator.msgTopologyOperator.metrics.enabled | bool | `false` | |
| rabbitmq-cluster-operator.msgTopologyOperator.metrics.serviceMonitor.enabled | bool | `true` | |
| rabbitmq-cluster-operator.useCertManager | bool | `true` | |
@@ -53,4 +52,4 @@ Kubernetes: `>= 1.30.0-0`
| strimzi-kafka-operator.watchAnyNamespace | bool | `true` | |
----------------------------------------------
Autogenerated from chart metadata using [helm-docs v1.8.1](https://github.com/norwoodj/helm-docs/releases/v1.8.1)
Autogenerated from chart metadata using [helm-docs v1.14.2](https://github.com/norwoodj/helm-docs/releases/v1.14.2)

View File

@@ -1,4 +1,3 @@
{{- if index .Values "cloudnative-pg" "enabled" }}
apiVersion: postgresql.cnpg.io/v1
kind: ClusterImageCatalog
metadata:
@@ -15,4 +14,3 @@ spec:
image: ghcr.io/cloudnative-pg/postgresql:16.6-33-bookworm@sha256:7dfda49485274b61ada9bb347caffac01dee442ffd119eb19317a2692347657b
- major: 17
image: ghcr.io/cloudnative-pg/postgresql:17.2-33-bookworm@sha256:52b78e8e4a297e268be168c7e107a2117072dc38f4a11d9d056ff0cc13d4007f
{{- end }}
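
For context, the index guard in this hunk renders the ClusterImageCatalog only when the cloudnative-pg subchart is switched on; the quoted index form is needed because the key contains a dash. A minimal values sketch (hypothetical, not part of this compare):

    # hypothetical cluster values enabling the guarded catalog
    cloudnative-pg:
      enabled: true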

View File

@@ -2,7 +2,7 @@ apiVersion: v2
name: kubezero
description: KubeZero - Root App of Apps chart
type: application
version: 1.31.6
version: 1.31.5
home: https://kubezero.com
icon: https://cdn.zero-downtime.net/assets/kubezero/logo-small-64.png
keywords:

View File

@@ -0,0 +1,30 @@
# Skeleton template to put into each cluster git folder
apiVersion: argoproj.io/v1alpha1
kind: Application
metadata:
name: kubezero
namespace: argocd
spec:
project: kubezero
source:
repoURL: https://cdn.zero-downtime.net/charts
chart: kubezero
targetRevision: {{ .Values.kubezero.version }}
helm:
parameters:
# We use this to detect if we are called from ArgoCD
- name: argocdAppName
value: $ARGOCD_APP_NAME
# This breaks the recursion, otherwise we install another kubezero project and app
- name: installKubeZero
value: "false"
values: |
{{- toYaml .Values | nindent 8 }}
destination:
server: https://kubernetes.default.svc
namespace: argocd
syncPolicy:
automated:
prune: true
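
The skeleton resolves {{ .Values.kubezero.version }} for targetRevision and inlines the caller's full values via toYaml, so the cluster git folder it is copied into is expected to carry at least a kubezero.version. A minimal sketch (the version number is illustrative):

    # hypothetical values.yaml beside the skeleton in a cluster git folder
    kubezero:
      version: "1.31.5"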

View File

@@ -33,14 +33,14 @@ spec:
server: {{ .Values.kubezero.server }}
namespace: {{ default "kube-system" ( index .Values $name "namespace" ) }}
revisionHistoryLimit: 2
syncPolicy:
automated:
prune: true
syncOptions:
- ServerSideApply=true
- CreateNamespace=true
- ApplyOutOfSyncOnly=true
{{- with .Values.kubezero.syncPolicy }}
{{- toYaml . | nindent 4 }}
{{- end }}
{{- include (print $name "-argo") $ }}
{{- end }}
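
In the with-guard variant, the syncPolicy block is rendered only when per-cluster values define one; a sketch restoring the hardcoded defaults shown above (the option list mirrors the replaced lines, and the placement under kubezero follows the template's lookup):

    # hypothetical per-cluster override reproducing the old hardcoded syncPolicy
    kubezero:
      syncPolicy:
        automated:
          prune: true
        syncOptions:
          - ServerSideApply=true
          - CreateNamespace=true
          - ApplyOutOfSyncOnly=true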

View File

@@ -61,8 +61,7 @@ argocd-apps:
namespace: argocd
syncPolicy:
automated:
prune: true
{{- toYaml (default dict .Values.kubezero.syncPolicy) | nindent 8 }}
argocd-image-updater:
enabled: {{ default "false" (index .Values "argo" "argocd-image-updater" "enabled") }}

View File

@@ -49,7 +49,7 @@ cert-manager:
{{- if eq .Values.global.platform "gke" }}
serviceAccount:
annotations:
iam.gke.io/gcp-service-account: "dns01-solver-cert-manager@{{ .Values.global.gcp.projectId }}.iam.gserviceaccount.com"
iam.gke.io/gcp-service-account: "dns01-solver@{{ .Values.global.gcp.projectId }}.iam.gserviceaccount.com"
{{- end }}
prometheus:

View File

@@ -2,7 +2,7 @@ apiVersion: v2
name: manticore
description: Chart for Manticore
type: application
version: 5.0.25
version: 5.0.26
appVersion: 5.0.25
home: https://kubezero.com
icon: https://cdn.zero-downtime.net/assets/kubezero/logo-small-64.png
@@ -17,6 +17,6 @@ dependencies:
version: ">= 0.1.4"
repository: https://cdn.zero-downtime.net/charts
- name: manticoresearch
version: "5.0.25"
version: "7.0.1"
repository: https://helm.manticoresearch.com
kubeVersion: ">= 1.20.0"

View File

@@ -10,8 +10,8 @@
## Version upgrades
- cilium 1.16.6
- istio 1.24.2
- ArgoCD 2.14.3 [custom ZDT image](https://git.zero-downtime.net/ZeroDownTime/zdt-argocd)
- istio 1.24.3
- ArgoCD 2.13.1 [custom ZDT image](https://git.zero-downtime.net/ZeroDownTime/zdt-argocd)
- Prometheus 3.1.0 / Grafana 11.5.1
- Nvidia container toolkit 1.17, drivers 565.57.01, Cuda 12.7