Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/logs/tls-issue-cert-manager-8-0.log
Warning: version difference between client (1.36) and server (1.33) exceeds the supported minor version skew of +/-1
Warning: version difference between client (1.36) and server (1.33) exceeds the supported minor version skew of +/-1
+ main
+ create_infra tls-issue-cert-manager-1530
+ local ns=tls-issue-cert-manager-1530
+ '[' -n pxc-operator ']'
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ kubectl get pxc --all-namespaces -o wide
+ kubectl patch pxc -n tls-issue-cert-manager-5591 some-name-tls-issue --type=merge -p '{"metadata":{"finalizers":[]}}'
perconaxtradbcluster.pxc.percona.com/some-name-tls-issue patched
+ kubectl_bin delete pxc --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.afrvba7J4x
++ mktemp
+ local LAST_ERR=/tmp/tmp.WdXi52U6qi
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.afrvba7J4x
perconaxtradbcluster.pxc.percona.com "some-name-tls-issue" deleted from tls-issue-cert-manager-5591 namespace
+ cat /tmp/tmp.WdXi52U6qi
+ rm /tmp/tmp.afrvba7J4x /tmp/tmp.WdXi52U6qi
+ return 0
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.n69Yp1GtDb
++ mktemp
+ local LAST_ERR=/tmp/tmp.X7nzrkA9X4
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.n69Yp1GtDb
No resources found
+ cat /tmp/tmp.X7nzrkA9X4
+ rm /tmp/tmp.n69Yp1GtDb /tmp/tmp.X7nzrkA9X4
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.4RGNUqWaOy
++ mktemp
+ local LAST_ERR=/tmp/tmp.aGWWM3AYci
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.4RGNUqWaOy
No resources found
+ cat /tmp/tmp.aGWWM3AYci
+ rm /tmp/tmp.4RGNUqWaOy /tmp/tmp.aGWWM3AYci
+ return 0
+ create_namespace pxc-operator
+ local namespace=pxc-operator
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ helm list --all-namespaces --filter chaos-mesh
++ sed s/NAMESPACE//
++ tail -n1
++ awk '-F ' '{print $2}'
+ local chaos_mesh_ns=
+ '[' -n '' ']'
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get MutatingWebhookConfiguration
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ grep validate-auth
++ kubectl get ValidatingWebhookConfiguration
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ awk '{print $1}'
++ grep chaos-mesh
++ kubectl api-resources
++ awk '{print $1}'
++ kubectl get crd
++ grep chaos-mesh.org
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get clusterrolebinding
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
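The kubectl_bin/LAST_OUT/LAST_ERR pattern traced above repeats for nearly every command in this log. A minimal sketch of the wrapper, reconstructed from the trace rather than copied from the suite's functions file (the pause between retries is an assumption):

    kubectl_bin() {
        # run kubectl up to three times, buffering stdout/stderr in temp files
        local LAST_OUT LAST_ERR exit_status=0 i
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ "$exit_status" != 0 ]; then
                sleep "$i"    # assumed: brief back-off before the next attempt
                continue
            fi
            break
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR" >&2
        rm "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }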
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get clusterrole
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
+ '[' -n '' ']'
+ desc 'cleaned up old namespaces pxc-operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up old namespaces pxc-operator
-----------------------------------------------------------------------------------
+ kubectl_bin delete namespace pxc-operator
+ awk '{print$1}'
+ xargs kubectl delete ns
++ mktemp
+ grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME'
+ kubectl_bin get ns
+ local LAST_OUT=/tmp/tmp.M5tRKvswCS
++ mktemp
+ local LAST_OUT=/tmp/tmp.tGWZkDGUFK
++ mktemp
+ local LAST_ERR=/tmp/tmp.j089TON3tx
+ local exit_status=0
++ seq 0 2
++ mktemp
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace pxc-operator
+ local LAST_ERR=/tmp/tmp.H7788anE7r
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl get ns
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.tGWZkDGUFK
+ cat /tmp/tmp.H7788anE7r
+ rm /tmp/tmp.tGWZkDGUFK /tmp/tmp.H7788anE7r
+ return 0
namespace "cert-manager" deleted
namespace "tls-issue-cert-manager-5591" deleted
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.M5tRKvswCS
namespace "pxc-operator" deleted
+ cat /tmp/tmp.j089TON3tx
+ rm /tmp/tmp.M5tRKvswCS /tmp/tmp.j089TON3tx
+ return 0
+ wait_for_delete namespace/pxc-operator
+ local res=namespace/pxc-operator
+ echo -n 'waiting for namespace/pxc-operator to be deleted'
waiting for namespace/pxc-operator to be deleted+ set +o xtrace
Error from server (NotFound): namespaces "pxc-operator" not found
+ desc 'create namespace pxc-operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace pxc-operator
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.2PtGiMx0PW
++ mktemp
+ local LAST_ERR=/tmp/tmp.pEAkgXIQSA
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl create namespace pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.2PtGiMx0PW
namespace/pxc-operator created
+ cat /tmp/tmp.pEAkgXIQSA
+ rm /tmp/tmp.2PtGiMx0PW /tmp/tmp.pEAkgXIQSA
+ return 0
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.cc8wPLaHj2
+++ mktemp
++ local LAST_ERR=/tmp/tmp.M78Er6asZj
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.cc8wPLaHj2
++ cat /tmp/tmp.M78Er6asZj
++ rm /tmp/tmp.cc8wPLaHj2 /tmp/tmp.M78Er6asZj
++ return 0
+ kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2473-6d392bea-4-cluster7 --namespace=pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.O4XOeW2QOR
++ mktemp
+ local LAST_ERR=/tmp/tmp.BWb3MMx11Z
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2473-6d392bea-4-cluster7 --namespace=pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.O4XOeW2QOR
Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2473-6d392bea-4-cluster7" modified.
+ cat /tmp/tmp.BWb3MMx11Z
+ rm /tmp/tmp.O4XOeW2QOR /tmp/tmp.BWb3MMx11Z
+ return 0
+ deploy_operator
+ desc 'start PXC operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
start PXC operator
-----------------------------------------------------------------------------------
+ kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/deploy/crd.yaml
++ mktemp
+ local LAST_OUT=/tmp/tmp.iQc6i0Foez
++ mktemp
+ local LAST_ERR=/tmp/tmp.FaDiYOIoQH
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/deploy/crd.yaml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.iQc6i0Foez
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied
+ cat /tmp/tmp.FaDiYOIoQH
+ rm /tmp/tmp.iQc6i0Foez /tmp/tmp.FaDiYOIoQH
+ return 0
+ '[' -n pxc-operator ']'
+ apply_rbac cw-rbac
+ local operator_namespace=pxc-operator
+ local rbac=cw-rbac
+ kubectl_bin apply -f -
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/deploy/cw-rbac.yaml
+ sed -e 's^namespace: .*^namespace: pxc-operator^'
++ mktemp
+ local LAST_OUT=/tmp/tmp.DLXqkV1MaL
++ mktemp
+ local LAST_ERR=/tmp/tmp.r4ppILrczC
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.DLXqkV1MaL
clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged
serviceaccount/percona-xtradb-cluster-operator created
clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged
+ cat /tmp/tmp.r4ppILrczC
+ rm /tmp/tmp.DLXqkV1MaL /tmp/tmp.r4ppILrczC
+ return 0
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/deploy/cw-operator.yaml
+ kubectl_bin apply -f -
+ sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2473-6d392bea^'
+ sed -e 's^failureThreshold: .*^failureThreshold: 10^'
+ yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' -
+ yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "PXCO_FEATURE_GATES").value) = ""' -
+ yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' -
++ mktemp
+ local LAST_OUT=/tmp/tmp.W6dEd9WmdC
++ mktemp
+ local LAST_ERR=/tmp/tmp.b3dpFwzsPa
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.W6dEd9WmdC
deployment.apps/percona-xtradb-cluster-operator created
service/percona-xtradb-cluster-operator created
+ cat /tmp/tmp.b3dpFwzsPa
+ rm /tmp/tmp.W6dEd9WmdC /tmp/tmp.b3dpFwzsPa
+ return 0
+ sleep 10
+ kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s
++ mktemp
+ local LAST_OUT=/tmp/tmp.nTvteagi3m
++ mktemp
+ local LAST_ERR=/tmp/tmp.RKLoqjJgdx
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.nTvteagi3m
pod/percona-xtradb-cluster-operator-55d95dc9d8-6njnz condition met
+ cat /tmp/tmp.RKLoqjJgdx
E0517 02:02:29.473518 28085 reflector.go:227] "Failed to watch" err="Get \"https://35.202.69.227/api/v1/namespaces/pxc-operator/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dpercona-xtradb-cluster-operator-55d95dc9d8-6njnz&resourceVersion=1778983349022780000&timeoutSeconds=311&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured"
+ rm /tmp/tmp.nTvteagi3m /tmp/tmp.RKLoqjJgdx
+ return 0
++ get_operator_pod
++ local label_prefix=app.kubernetes.io/
+++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator
+++ grep -c percona-xtradb-cluster-operator
++ local check_label=1
++ [[ 1 -eq 0 ]]
++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator
++ head -1
++ jq -r '.items[] | select(.metadata.deletionTimestamp == null) | .metadata.name'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.sTRdfdc3Ep
+++ mktemp
++ local LAST_ERR=/tmp/tmp.fPW5OPIHgK
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.sTRdfdc3Ep
++ cat /tmp/tmp.fPW5OPIHgK
++ rm /tmp/tmp.sTRdfdc3Ep /tmp/tmp.fPW5OPIHgK
++ return 0
+ wait_pod percona-xtradb-cluster-operator-55d95dc9d8-6njnz 480 pxc-operator
+ local pod=percona-xtradb-cluster-operator-55d95dc9d8-6njnz
+ local max_retry=480
+ local ns=pxc-operator
++ echo percona-xtradb-cluster-operator-55d95dc9d8-6njnz
++ grep -E '^(pxc|proxysql)$'
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
+ local container=
+ set +o xtrace
pod/percona-xtradb-cluster-operator-55d95dc9d8-6njnz condition met
E0517 02:02:35.391039 28910 reflector.go:227] "Failed to watch" err="Get \"https://35.202.69.227/api/v1/namespaces/pxc-operator/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dpercona-xtradb-cluster-operator-55d95dc9d8-6njnz&resourceVersion=1778983352725476000&timeoutSeconds=579&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured"
waiting for pod/percona-xtradb-cluster-operator-55d95dc9d8-6njnz to become Ready.Ok
+ sleep 3
+ create_namespace tls-issue-cert-manager-1530
+ local namespace=tls-issue-cert-manager-1530
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ awk '-F ' '{print $2}'
++ tail -n1
++ helm list --all-namespaces --filter chaos-mesh
++ sed s/NAMESPACE//
+ local chaos_mesh_ns=
+ '[' -n '' ']'
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get MutatingWebhookConfiguration
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
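Every `error: resource(s) were provided, but no name was specified` in the destroy_chaos_mesh blocks is expected noise: the name list comes from a `kubectl get | grep chaos-mesh | awk '{print $1}'` pipeline, and on a cluster with no Chaos Mesh installed that pipeline is empty, so `kubectl delete` receives a resource type with no names and exits non-zero. The trailing `+ :` is the no-op that swallows the failure, equivalent to:

    timeout 30 kubectl delete MutatingWebhookConfiguration \
        $(kubectl get MutatingWebhookConfiguration | grep chaos-mesh | awk '{print $1}') \
        || :    # empty selection: kubectl fails, ':' keeps set -e from aborting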
++ kubectl get ValidatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep validate-auth
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl api-resources
++ awk '{print $1}'
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get crd
++ grep chaos-mesh.org
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ awk '{print $1}'
++ kubectl get clusterrolebinding
++ grep chaos-mesh
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ awk '{print $1}'
++ kubectl get clusterrole
++ grep chaos-mesh
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
+ '[' -n '' ']'
+ desc 'cleaned up old namespaces tls-issue-cert-manager-1530'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up old namespaces tls-issue-cert-manager-1530
-----------------------------------------------------------------------------------
+ kubectl_bin delete namespace tls-issue-cert-manager-1530
+ awk '{print$1}'
+ xargs kubectl delete ns
++ mktemp
+ grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME'
+ kubectl_bin get ns
+ local LAST_OUT=/tmp/tmp.f84TqcAc8v
++ mktemp
+ local LAST_OUT=/tmp/tmp.6jYww5Yw1X
++ mktemp
++ mktemp
+ local LAST_ERR=/tmp/tmp.N6ApbWm2vw
+ local exit_status=0
+ local LAST_ERR=/tmp/tmp.EPuWRl3txI
+ local exit_status=0
++ seq 0 2
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace tls-issue-cert-manager-1530
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl get ns
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace tls-issue-cert-manager-1530
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.6jYww5Yw1X
+ cat /tmp/tmp.EPuWRl3txI
+ rm /tmp/tmp.6jYww5Yw1X /tmp/tmp.EPuWRl3txI
+ return 0
error: resource(s) were provided, but no name was specified
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace tls-issue-cert-manager-1530
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ cat /tmp/tmp.f84TqcAc8v
+ cat /tmp/tmp.N6ApbWm2vw
Error from server (NotFound): namespaces "tls-issue-cert-manager-1530" not found
+ rm /tmp/tmp.f84TqcAc8v /tmp/tmp.N6ApbWm2vw
+ return 1
+ :
+ wait_for_delete namespace/tls-issue-cert-manager-1530
+ local res=namespace/tls-issue-cert-manager-1530
+ echo -n 'waiting for namespace/tls-issue-cert-manager-1530 to be deleted'
waiting for namespace/tls-issue-cert-manager-1530 to be deleted+ set +o xtrace
Error from server (NotFound): namespaces "tls-issue-cert-manager-1530" not found
+ desc 'create namespace tls-issue-cert-manager-1530'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace tls-issue-cert-manager-1530
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace tls-issue-cert-manager-1530
++ mktemp
+ local LAST_OUT=/tmp/tmp.aotCvndpUK
++ mktemp
+ local LAST_ERR=/tmp/tmp.GY6xEpAJDM
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl create namespace tls-issue-cert-manager-1530
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.aotCvndpUK
namespace/tls-issue-cert-manager-1530 created
+ cat /tmp/tmp.GY6xEpAJDM
+ rm /tmp/tmp.aotCvndpUK /tmp/tmp.GY6xEpAJDM
+ return 0
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.VmJDSQPuCN
+++ mktemp
++ local LAST_ERR=/tmp/tmp.64osYskV6p
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.VmJDSQPuCN
++ cat /tmp/tmp.64osYskV6p
++ rm /tmp/tmp.VmJDSQPuCN /tmp/tmp.64osYskV6p
++ return 0
+ kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2473-6d392bea-4-cluster7 --namespace=tls-issue-cert-manager-1530
++ mktemp
+ local LAST_OUT=/tmp/tmp.EPMHPeHYUV
++ mktemp
+ local LAST_ERR=/tmp/tmp.qtBODr2EcT
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2473-6d392bea-4-cluster7 --namespace=tls-issue-cert-manager-1530
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.EPMHPeHYUV
Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2473-6d392bea-4-cluster7" modified.
+ cat /tmp/tmp.qtBODr2EcT
+ rm /tmp/tmp.EPMHPeHYUV /tmp/tmp.qtBODr2EcT
+ return 0
+ apply_secrets
+ desc 'create secrets for cloud storages'
+ set +o xtrace
-----------------------------------------------------------------------------------
create secrets for cloud storages
-----------------------------------------------------------------------------------
+ '[' -z '' ']'
+ kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/conf/cloud-secret.yml
++ mktemp
+ local LAST_OUT=/tmp/tmp.bEeWypcsNI
++ mktemp
+ local LAST_ERR=/tmp/tmp.Bb2fCWxLy5
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/conf/cloud-secret.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.bEeWypcsNI
secret/minio-secret created
secret/aws-s3-secret created
secret/do-spaces-secret created
secret/gcp-cs-secret created
secret/azure-secret created
+ cat /tmp/tmp.Bb2fCWxLy5
+ rm /tmp/tmp.bEeWypcsNI /tmp/tmp.Bb2fCWxLy5
+ return 0
+ cluster=some-name-tls-issue
+ deploy_cert_manager
+ desc 'deploy cert manager'
+ set +o xtrace
-----------------------------------------------------------------------------------
deploy cert manager
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace cert-manager
++ mktemp
+ local LAST_OUT=/tmp/tmp.G1Gt83A1j0
++ mktemp
+ local LAST_ERR=/tmp/tmp.oBl6S5hsaF
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl create namespace cert-manager
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.G1Gt83A1j0
namespace/cert-manager created
+ cat /tmp/tmp.oBl6S5hsaF
+ rm /tmp/tmp.G1Gt83A1j0 /tmp/tmp.oBl6S5hsaF
+ return 0
+ kubectl_bin label namespace cert-manager certmanager.k8s.io/disable-validation=true
++ mktemp
+ local LAST_OUT=/tmp/tmp.kmAhDpqdiB
++ mktemp
+ local LAST_ERR=/tmp/tmp.s00fSAOnAB
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl label namespace cert-manager certmanager.k8s.io/disable-validation=true
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.kmAhDpqdiB
namespace/cert-manager labeled
+ cat /tmp/tmp.s00fSAOnAB
+ rm /tmp/tmp.kmAhDpqdiB /tmp/tmp.s00fSAOnAB
+ return 0
+ kubectl_bin apply -f https://github.com/jetstack/cert-manager/releases/download/v1.19.2/cert-manager.yaml --validate=false
++ mktemp
+ local LAST_OUT=/tmp/tmp.BB8EaNytYH
++ mktemp
+ local LAST_ERR=/tmp/tmp.yv19qcpiaQ
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f https://github.com/jetstack/cert-manager/releases/download/v1.19.2/cert-manager.yaml --validate=false
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.BB8EaNytYH
namespace/cert-manager configured
customresourcedefinition.apiextensions.k8s.io/challenges.acme.cert-manager.io unchanged
customresourcedefinition.apiextensions.k8s.io/orders.acme.cert-manager.io unchanged
customresourcedefinition.apiextensions.k8s.io/certificaterequests.cert-manager.io unchanged
customresourcedefinition.apiextensions.k8s.io/certificates.cert-manager.io unchanged
customresourcedefinition.apiextensions.k8s.io/clusterissuers.cert-manager.io unchanged
customresourcedefinition.apiextensions.k8s.io/issuers.cert-manager.io unchanged
serviceaccount/cert-manager-cainjector created
serviceaccount/cert-manager created
serviceaccount/cert-manager-webhook created
clusterrole.rbac.authorization.k8s.io/cert-manager-cainjector unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-issuers unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificates unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-orders unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-challenges unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-cluster-view unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-view unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-edit unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests unchanged
clusterrole.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-issuers unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificates unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-orders unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-challenges unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests unchanged
clusterrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews unchanged
role.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection unchanged
role.rbac.authorization.k8s.io/cert-manager:leaderelection unchanged
role.rbac.authorization.k8s.io/cert-manager-tokenrequest created
role.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created
rolebinding.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection unchanged
rolebinding.rbac.authorization.k8s.io/cert-manager:leaderelection unchanged
rolebinding.rbac.authorization.k8s.io/cert-manager-tokenrequest created
rolebinding.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created
service/cert-manager-cainjector created
service/cert-manager created
service/cert-manager-webhook created
deployment.apps/cert-manager-cainjector created
deployment.apps/cert-manager created
deployment.apps/cert-manager-webhook created
mutatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook configured
validatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook configured
+ cat /tmp/tmp.yv19qcpiaQ
Warning: resource namespaces/cert-manager is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically.
+ rm /tmp/tmp.BB8EaNytYH /tmp/tmp.yv19qcpiaQ
+ return 0
+ '[' '' == 4.10 ']'
+ sleep 70
+ desc 'wait for cert-manager to be ready'
+ set +o xtrace
-----------------------------------------------------------------------------------
wait for cert-manager to be ready
-----------------------------------------------------------------------------------
+ kubectl_bin -n cert-manager wait --for=condition=Available deployment/cert-manager --timeout=180s
++ mktemp
+ local LAST_OUT=/tmp/tmp.iUhFNEXUoj
++ mktemp
+ local LAST_ERR=/tmp/tmp.PwluCuC669
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl -n cert-manager wait --for=condition=Available deployment/cert-manager --timeout=180s
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.iUhFNEXUoj
deployment.apps/cert-manager condition met
+ cat /tmp/tmp.PwluCuC669
E0517 02:04:41.860519 14321 reflector.go:227] "Failed to watch" err="Get \"https://35.202.69.227/apis/apps/v1/namespaces/cert-manager/deployments?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dcert-manager&resourceVersion=1778983479526216000&timeoutSeconds=578&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured"
+ rm /tmp/tmp.iUhFNEXUoj /tmp/tmp.PwluCuC669
+ return 0
+ kubectl_bin -n cert-manager wait --for=condition=Available deployment/cert-manager-cainjector --timeout=180s
++ mktemp
+ local LAST_OUT=/tmp/tmp.1bIiaQWrSG
++ mktemp
+ local LAST_ERR=/tmp/tmp.MrjAINx1JR
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl -n cert-manager wait --for=condition=Available deployment/cert-manager-cainjector --timeout=180s
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.1bIiaQWrSG
deployment.apps/cert-manager-cainjector condition met
+ cat /tmp/tmp.MrjAINx1JR
E0517 02:04:44.104535 14741 reflector.go:227] "Failed to watch" err="Get \"https://35.202.69.227/apis/apps/v1/namespaces/cert-manager/deployments?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dcert-manager-cainjector&resourceVersion=1778983479526216000&timeoutSeconds=360&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured"
+ rm /tmp/tmp.1bIiaQWrSG /tmp/tmp.MrjAINx1JR
+ return 0
+ kubectl_bin -n cert-manager wait --for=condition=Available deployment/cert-manager-webhook --timeout=180s
++ mktemp
+ local LAST_OUT=/tmp/tmp.7yLIUkEcfj
++ mktemp
+ local LAST_ERR=/tmp/tmp.HwQGFg3Uuq
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl -n cert-manager wait --for=condition=Available deployment/cert-manager-webhook --timeout=180s
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.7yLIUkEcfj
deployment.apps/cert-manager-webhook condition met
+ cat /tmp/tmp.HwQGFg3Uuq
E0517 02:04:45.177700 15089 reflector.go:227] "Failed to watch" err="Get \"https://35.202.69.227/apis/apps/v1/namespaces/cert-manager/deployments?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dcert-manager-webhook&resourceVersion=1778983484526284000&timeoutSeconds=540&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured"
+ rm /tmp/tmp.7yLIUkEcfj /tmp/tmp.HwQGFg3Uuq
+ return 0
+ local cm_ready=0
++ seq 1 30
+ for cm_try in '$(seq 1 30)'
+ kubectl_bin apply -f -
+ echo '{"apiVersion":"cert-manager.io/v1","kind":"Issuer","metadata":{"name":"cert-manager-readiness-check"},"spec":{"selfSigned":{}}}'
issuer.cert-manager.io/cert-manager-readiness-check created
+ kubectl_bin delete issuer cert-manager-readiness-check --ignore-not-found
++ mktemp
+ local LAST_OUT=/tmp/tmp.jnTCaWkHNe
++ mktemp
+ local LAST_ERR=/tmp/tmp.bi0nRTOlnR
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete issuer cert-manager-readiness-check --ignore-not-found
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.jnTCaWkHNe
issuer.cert-manager.io "cert-manager-readiness-check" deleted from tls-issue-cert-manager-1530 namespace
+ cat /tmp/tmp.bi0nRTOlnR
+ rm /tmp/tmp.jnTCaWkHNe /tmp/tmp.bi0nRTOlnR
+ return 0
+ cm_ready=1
+ break
+ '[' 1 -ne 1 ']'
+ desc 'create pxc cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
create pxc cluster
-----------------------------------------------------------------------------------
+ kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/conf/secrets_without_tls.yml
++ mktemp
+ local LAST_OUT=/tmp/tmp.1kj6GLLRrQ
++ mktemp
+ local LAST_ERR=/tmp/tmp.jhQHAxCItf
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/conf/secrets_without_tls.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.1kj6GLLRrQ
secret/my-cluster-secrets created
+ cat /tmp/tmp.jhQHAxCItf
+ rm /tmp/tmp.1kj6GLLRrQ /tmp/tmp.jhQHAxCItf
+ return 0
+ apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/conf/client.yml
+ local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/conf/client.yml
+ local pvc_name=
+ '[' -z '' ']'
+ cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/conf/client.yml ''
+ local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/conf/client.yml
+ local pvc_name=
+ kubectl_bin apply -f -
+ /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#'
+ /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#'
+ /usr/bin/sed -e 's#apply:.*#apply: Never#'
+ /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #'
++ mktemp
+ local LAST_OUT=/tmp/tmp.spkvbDHGQE
+ /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#'
++ mktemp
+ /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#'
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/conf/client.yml
+ /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2473-6d392bea#'
+ /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#'
+ /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#'
+ /usr/bin/sed -e s~minio-service.#namespace~minio-service.tls-issue-cert-manager-1530~
+ local LAST_ERR=/tmp/tmp.sM4MkyrBrB
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.spkvbDHGQE
deployment.apps/pxc-client created
+ cat /tmp/tmp.sM4MkyrBrB
+ rm /tmp/tmp.spkvbDHGQE /tmp/tmp.sM4MkyrBrB
+ return 0
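cat_config is the suite's manifest templater: the YAML is streamed through a stack of /usr/bin/sed substitutions that pin the apiVersion and swap every image for the build under test, then piped to kubectl apply. The shuffled ordering of the sed lines in the trace is only xtrace interleaving of the pipeline's processes; logically it is one pipeline, roughly (a subset of the substitutions traced above):

    cat "$input_file" \
        | /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
        | /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2473-6d392bea#' \
        | /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' \
        | /usr/bin/sed -e s~minio-service.#namespace~minio-service.tls-issue-cert-manager-1530~ \
        | kubectl apply -f -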
+ apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue.yml
+ local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue.yml
+ local pvc_name=
+ '[' -z '' ']'
+ cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue.yml ''
+ local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue.yml
+ local pvc_name=
+ kubectl_bin apply -f -
+ /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#'
+ /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#'
+ /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
++ mktemp
+ /usr/bin/sed -e 's#apply:.*#apply: Never#'
+ /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#'
+ /usr/bin/sed -e s~minio-service.#namespace~minio-service.tls-issue-cert-manager-1530~
+ /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#'
+ local LAST_OUT=/tmp/tmp.X6eQx2SI7o
+ /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #'
+ /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2473-6d392bea#'
+ /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#'
+ /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#'
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue.yml
++ mktemp
+ local LAST_ERR=/tmp/tmp.devyJO73BK
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.X6eQx2SI7o
perconaxtradbcluster.pxc.percona.com/some-name-tls-issue created
+ cat /tmp/tmp.devyJO73BK
+ rm /tmp/tmp.X6eQx2SI7o /tmp/tmp.devyJO73BK
+ return 0
+ desc 'wait for cluster to be ready'
+ set +o xtrace
-----------------------------------------------------------------------------------
wait for cluster to be ready
-----------------------------------------------------------------------------------
+ wait_for_running some-name-tls-issue-haproxy 1
+ local name=some-name-tls-issue-haproxy
+ let last_pod=0
+ :
+ local max_retry=480
+ desc 'wait for running cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
wait for running cluster
-----------------------------------------------------------------------------------
++ seq 0 0
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-tls-issue-haproxy-0 480
+ local pod=some-name-tls-issue-haproxy-0
+ local max_retry=480
+ local ns=
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
++ echo some-name-tls-issue-haproxy-0
+ local container=
+ set +o xtrace
Error from server (NotFound): pods "some-name-tls-issue-haproxy-0" not found
waiting for pod/some-name-tls-issue-haproxy-0 to become Ready...................Ok
+ wait_for_running some-name-tls-issue-pxc 3
+ local name=some-name-tls-issue-pxc
+ let last_pod=2
+ local max_retry=480
+ desc 'wait for running cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
wait for running cluster
-----------------------------------------------------------------------------------
++ seq 0 2
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-tls-issue-pxc-0 480
+ local pod=some-name-tls-issue-pxc-0
+ local max_retry=480
+ local ns=
++ echo some-name-tls-issue-pxc-0
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=pxc
+ set +o xtrace
pod/some-name-tls-issue-pxc-0 condition met
E0517 02:06:13.633144 27174 reflector.go:227] "Failed to watch" err="Get \"https://35.202.69.227/api/v1/namespaces/tls-issue-cert-manager-1530/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dsome-name-tls-issue-pxc-0&resourceVersion=1778983572482031012&timeoutSeconds=520&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured"
waiting for pod/some-name-tls-issue-pxc-0 to become Ready.Ok
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-tls-issue-pxc-1 480
+ local pod=some-name-tls-issue-pxc-1
+ local max_retry=480
+ local ns=
++ echo some-name-tls-issue-pxc-1
++ grep -E '^(pxc|proxysql)$'
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
+ local container=pxc
+ set +o xtrace
pod/some-name-tls-issue-pxc-1 condition met
waiting for pod/some-name-tls-issue-pxc-1 to become Ready.Ok
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-tls-issue-pxc-2 480
+ local pod=some-name-tls-issue-pxc-2
+ local max_retry=480
+ local ns=
++ echo some-name-tls-issue-pxc-2
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=pxc
+ set +o xtrace
pod/some-name-tls-issue-pxc-2 condition met
waiting for pod/some-name-tls-issue-pxc-2 to become Ready.Ok
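wait_pod first derives which container to check from the pod name (the sed/grep pair above maps ...-pxc-0 to pxc and leaves haproxy pods with an empty container), then polls until the pod reports Ready, printing a dot per attempt. The polling mechanics are hidden behind `set +o xtrace`; a plausible sketch, with the kubectl-wait loop as an assumption:

    wait_pod() {
        local pod=$1 max_retry=${2:-480} ns=$3 retry=0
        echo -n "waiting for pod/$pod to become Ready"
        until kubectl wait --for=condition=Ready "pod/$pod" ${ns:+-n "$ns"} --timeout=5s >/dev/null 2>&1; do
            echo -n .
            retry=$((retry + 1))
            if [ "$retry" -ge "$max_retry" ]; then
                echo " max retry count $max_retry reached"
                return 1
            fi
        done
        echo .Ok
    }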
+ wait_cluster_consistency some-name-tls-issue 3 2
+ local cluster_name=some-name-tls-issue
+ local cluster_size=3
+ local proxy_size=2
+ '[' -z 2 ']'
+ desc 'wait cluster consistency'
+ set +o xtrace
-----------------------------------------------------------------------------------
wait cluster consistency
-----------------------------------------------------------------------------------
+ local i=0
+ local max=300
+ sleep 7
+ echo -n 'waiting for pxc/some-name-tls-issue to be ready'
waiting for pxc/some-name-tls-issue to be ready++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.CXQuc8wjJh
+++ mktemp
++ local LAST_ERR=/tmp/tmp.PHBrOOGH1b
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.CXQuc8wjJh
++ cat /tmp/tmp.PHBrOOGH1b
++ rm /tmp/tmp.CXQuc8wjJh /tmp/tmp.PHBrOOGH1b
++ return 0
+ [[ ready == \r\e\a\d\y ]]
++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.pxc.ready}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.94kyeOa3El
+++ mktemp
++ local LAST_ERR=/tmp/tmp.vi7VsywIoN
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.pxc.ready}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.94kyeOa3El
++ cat /tmp/tmp.vi7VsywIoN
++ rm /tmp/tmp.94kyeOa3El /tmp/tmp.vi7VsywIoN
++ return 0
+ [[ 3 == \3 ]]
+++ get_proxy_engine some-name-tls-issue
+++ local cluster_name=some-name-tls-issue
++++ get_proxy some-name-tls-issue
++++ local target_cluster=some-name-tls-issue
+++++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.spec.haproxy.enabled}'
++++++ mktemp
+++++ local LAST_OUT=/tmp/tmp.WvLA8XlDC3
++++++ mktemp
+++++ local LAST_ERR=/tmp/tmp.s9nz7sYBLf
+++++ local exit_status=0
++++++ seq 0 2
+++++ for i in '$(seq 0 2)'
+++++ set +e
+++++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.spec.haproxy.enabled}'
+++++ exit_status=0
+++++ set -e
+++++ '[' 0 '!=' 0 ']'
+++++ break
+++++ cat /tmp/tmp.WvLA8XlDC3
+++++ cat /tmp/tmp.s9nz7sYBLf
+++++ rm /tmp/tmp.WvLA8XlDC3 /tmp/tmp.s9nz7sYBLf
+++++ return 0
++++ [[ true == \t\r\u\e ]]
++++ echo some-name-tls-issue-haproxy
++++ return
+++ local cluster_proxy=some-name-tls-issue-haproxy
+++ echo haproxy
++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.haproxy.ready}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.Z1GiskhTgw
+++ mktemp
++ local LAST_ERR=/tmp/tmp.oJfLjsGZIL
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.haproxy.ready}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.Z1GiskhTgw
++ cat /tmp/tmp.oJfLjsGZIL
++ rm /tmp/tmp.Z1GiskhTgw /tmp/tmp.oJfLjsGZIL
++ return 0
+ [[ 2 == \2 ]]
+ echo
+ desc 'check if certificates issued with certmanager'
+ set +o xtrace
-----------------------------------------------------------------------------------
check if certificates issued with certmanager
-----------------------------------------------------------------------------------
+ tlsSecretsShouldExist some-name-tls-issue-ssl
+ local secretName=some-name-tls-issue-ssl
+ checkTLSSecret some-name-tls-issue-ssl ca.crt
+ local secretName=some-name-tls-issue-ssl
+ local dataKey=ca.crt
++ kubectl_bin get secrets/some-name-tls-issue-ssl -o json
++ jq '.data["ca.crt"]'
+++ mktemp
++ local
LAST_OUT=/tmp/tmp.sNAb2DsybX +++ mktemp ++ local LAST_ERR=/tmp/tmp.oUSM87QP0J ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/some-name-tls-issue-ssl -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sNAb2DsybX ++ cat /tmp/tmp.oUSM87QP0J ++ rm /tmp/tmp.sNAb2DsybX /tmp/tmp.oUSM87QP0J ++ return 0 + local 'secretData="LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURFakNDQWZxZ0F3SUJBZ0lVQWo2V1FpOEhNczZiejJwUzZsVk5uSi9DQ1VFd0RRWUpLb1pJaHZjTkFRRUwKQlFBd0lURWZNQjBHQTFVRUF4TVdjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjFaUzFqWVRBZUZ3MHlOakExTVRjdwpNakExTUROYUZ3MHlPREF6TWpVeE1EQTFNRE5hTUNFeEh6QWRCZ05WQkFNVEZuTnZiV1V0Ym1GdFpTMTBiSE10CmFYTnpkV1V0WTJFd2dnRWlNQTBHQ1NxR1NJYjNEUUVCQVFVQUE0SUJEd0F3Z2dFS0FvSUJBUUNnUy90REgwc3IKS0g4U3ZUNzNaQnFOc1crdVVuc1hRaTBXUUVRdXV4QVpvV2lSamJwdnlPUytyaGZpbU0wcm1EK21qU3ZjQWZlYgpVbDR6cVMzV25MU2JMU2tRS0xXNFFwVCtkS2Q1R3JTdzZxMmpuOVRTdy82TVpHV2VYNXdQT0lCRFp1TVZuZks0CktOa1VYNndBbENiQVBkQzR4THk0Nzd0WGdHV3lJU3V4YW1qckFsL2pMNjVXQy9Db0pTYWVQZmQ5aW01cHM0UTIKR0VLa0NEbWczcm9QWTh4ZlYyNlUxRk9qQU1haG1nVHNROG41Ymp0ZWxpNjlKbHZhMVFaaXM1VGIzMXo2T2RxbQpSd25peWVoMXdJNnJsMlZubFZXKzhWWE1kem85ckY2TFBWWWhZRlMrMVN5bC95YUx3eFdIVzBPRHVud1pPRGVpClZlUUVsUzNKV3ZYdkFnTUJBQUdqUWpCQU1BNEdBMVVkRHdFQi93UUVBd0lDcERBUEJnTlZIUk1CQWY4RUJUQUQKQVFIL01CMEdBMVVkRGdRV0JCVGsxRHY1bFFCakt5clBscTg0T21Ia2xYSUFpakFOQmdrcWhraUc5dzBCQVFzRgpBQU9DQVFFQVFBaUVsbTBTWmhER0ZzdUlTdDFFcVAzR1MrdHhKVVdTOGR4Q3dDQ1NoOVRCUFpCaUs2VXYza296CjJsNG9Sc2dvSWRqOXZrY2NhMloyNW83QU1oN2laODdrZlZzQkVCVjZjbDdwUWVVZzc1bEpXbkY4VkkvcHpCM0oKbkRucSt1dUd5aXBZZGgrek9ONG4zVkhHaDlDeVRBWkFFL0w0SWthZ0ZVc2EzTlVjeW1xMUZvY25WYkRrVGhnTgpaOGl1RVY0M2lWdU9VYmIrd2RWZ1F0dmMvMnh2dkNGRnk1NkJQSVhTOWNJcWN0c0I4L0tLSC83Zk1ab3NiVCt0CkVrZDVrNnYzNld2L2dzZ2hNVHFQQzdwbXhFcG9YSVd3RktnRXo0eUt5Vi9STUVxSWxVcnVhcER0MnE4NWhsWUEKV2tLd1pxOEw0TER5andoaGdycDAxYnBhVGsyNFR3PT0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo="' + '[' -z '"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURFakNDQWZxZ0F3SUJBZ0lVQWo2V1FpOEhNczZiejJwUzZsVk5uSi9DQ1VFd0RRWUpLb1pJaHZjTkFRRUwKQlFBd0lURWZNQjBHQTFVRUF4TVdjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjFaUzFqWVRBZUZ3MHlOakExTVRjdwpNakExTUROYUZ3MHlPREF6TWpVeE1EQTFNRE5hTUNFeEh6QWRCZ05WQkFNVEZuTnZiV1V0Ym1GdFpTMTBiSE10CmFYTnpkV1V0WTJFd2dnRWlNQTBHQ1NxR1NJYjNEUUVCQVFVQUE0SUJEd0F3Z2dFS0FvSUJBUUNnUy90REgwc3IKS0g4U3ZUNzNaQnFOc1crdVVuc1hRaTBXUUVRdXV4QVpvV2lSamJwdnlPUytyaGZpbU0wcm1EK21qU3ZjQWZlYgpVbDR6cVMzV25MU2JMU2tRS0xXNFFwVCtkS2Q1R3JTdzZxMmpuOVRTdy82TVpHV2VYNXdQT0lCRFp1TVZuZks0CktOa1VYNndBbENiQVBkQzR4THk0Nzd0WGdHV3lJU3V4YW1qckFsL2pMNjVXQy9Db0pTYWVQZmQ5aW01cHM0UTIKR0VLa0NEbWczcm9QWTh4ZlYyNlUxRk9qQU1haG1nVHNROG41Ymp0ZWxpNjlKbHZhMVFaaXM1VGIzMXo2T2RxbQpSd25peWVoMXdJNnJsMlZubFZXKzhWWE1kem85ckY2TFBWWWhZRlMrMVN5bC95YUx3eFdIVzBPRHVud1pPRGVpClZlUUVsUzNKV3ZYdkFnTUJBQUdqUWpCQU1BNEdBMVVkRHdFQi93UUVBd0lDcERBUEJnTlZIUk1CQWY4RUJUQUQKQVFIL01CMEdBMVVkRGdRV0JCVGsxRHY1bFFCakt5clBscTg0T21Ia2xYSUFpakFOQmdrcWhraUc5dzBCQVFzRgpBQU9DQVFFQVFBaUVsbTBTWmhER0ZzdUlTdDFFcVAzR1MrdHhKVVdTOGR4Q3dDQ1NoOVRCUFpCaUs2VXYza296CjJsNG9Sc2dvSWRqOXZrY2NhMloyNW83QU1oN2laODdrZlZzQkVCVjZjbDdwUWVVZzc1bEpXbkY4VkkvcHpCM0oKbkRucSt1dUd5aXBZZGgrek9ONG4zVkhHaDlDeVRBWkFFL0w0SWthZ0ZVc2EzTlVjeW1xMUZvY25WYkRrVGhnTgpaOGl1RVY0M2lWdU9VYmIrd2RWZ1F0dmMvMnh2dkNGRnk1NkJQSVhTOWNJcWN0c0I4L0tLSC83Zk1ab3NiVCt0CkVrZDVrNnYzNld2L2dzZ2hNVHFQQzdwbXhFcG9YSVd3RktnRXo0eUt5Vi9STUVxSWxVcnVhcER0MnE4NWhsWUEKV2tLd1pxOEw0TER5andoaGdycDAxYnBhVGsyNFR3PT0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo="' ']' + checkTLSSecret some-name-tls-issue-ssl tls.crt + local secretName=some-name-tls-issue-ssl + local dataKey=tls.crt ++ jq 
'.data["tls.crt"]' ++ kubectl_bin get secrets/some-name-tls-issue-ssl -o json +++ mktemp ++ local LAST_OUT=/tmp/tmp.pvf7NopFu3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.AYkBzUMdQn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/some-name-tls-issue-ssl -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pvf7NopFu3 ++ cat /tmp/tmp.AYkBzUMdQn ++ rm /tmp/tmp.pvf7NopFu3 /tmp/tmp.AYkBzUMdQn ++ return 0 + local 'secretData="LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURvVENDQW9tZ0F3SUJBZ0lVYjA4b3Fjd0hjSHJYVWk1UmF0Vi8zSlhORHRZd0RRWUpLb1pJaHZjTkFRRUwKQlFBd0lURWZNQjBHQTFVRUF4TVdjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjFaUzFqWVRBZUZ3MHlOakExTVRjdwpNakExTURkYUZ3MHlOakEzTURReE1EQTFNRGRhTUNjeEpUQWpCZ05WQkFNVEhITnZiV1V0Ym1GdFpTMTBiSE10CmFYTnpkV1V0Y0hKdmVIbHpjV3d3Z2dFaU1BMEdDU3FHU0liM0RRRUJBUVVBQTRJQkR3QXdnZ0VLQW9JQkFRRGcKVlRDeG9GR1ZobEY2K3JiTkVydUw2bXA2aFJ0QmJUNTJaa0ZrSlRoRnhVVEZHWXVpb2hudVhhQkJvaktyUDY1dApqTXRqSmgrMFBXVDNaREdmalVzTlNMa3R1bnBFbG5lUlVYNjVrblozRXZ0RHVJYktMWXQwcFU4M2pFaDNTdnF5CjliaUIzREFSV09Bbkx6L09xSDRwMUsrZjdSWElHZ3JwekNZNDYvTFVjU3pDVTFTam5LdzN0dUxjRmpPeVJPcCsKeVI1YnQ3dGk2T0FBVjhxTFFvVTNzM2NJcW1udk1ad1RESlNNMWg2K1gxVFdNVlVwTW9uRTVZRHJlZTdDcjFuVApSU1p2MVVPRmVTbUQ1Zm05R0wxZ20yNXA1T3REazVLNjNGRFZ4TXE0N1ZuZksxVDN3S3pGQ3M1N0I5b01mdlJpCmZ0RjhwaWxtVVkxNmdsRTkyTGtCQWdNQkFBR2pnY293Z2Njd0RnWURWUjBQQVFIL0JBUURBZ1dnTUF3R0ExVWQKRXdFQi93UUNNQUF3SHdZRFZSMGpCQmd3Rm9BVTVOUTcrWlVBWXlzcXo1YXZPRHBoNUpWeUFJb3dnWVVHQTFVZApFUVIrTUh5Q0YzTnZiV1V0Ym1GdFpTMTBiSE10YVhOemRXVXRjSGhqZ2h4emIyMWxMVzVoYldVdGRHeHpMV2x6CmMzVmxMWEJ5YjNoNWMzRnNnaGtxTG5OdmJXVXRibUZ0WlMxMGJITXRhWE56ZFdVdGNIaGpnaDRxTG5OdmJXVXQKYm1GdFpTMTBiSE10YVhOemRXVXRjSEp2ZUhsemNXeUNDSFJsYzNRdVkyOXRNQTBHQ1NxR1NJYjNEUUVCQ3dVQQpBNElCQVFBaG9talRIOGIyNHpqQ0xFR1RHNU1GYlhrNXg0RjM1WHJyTTlXZ3JSL2ZKTXc4SnNIRFZyUzVCdUc0CnV4eXpFckV5cDZCSnZBeXhvbHdkN0k4KzdnL0x2Z2t5UDh0a3laQVBpQXk3aFFVdTRRUXEvNmlTSHlDWXBFK3QKVDlvUFkvVTJBN0J2bmpzaHB0blF1WjVEMzcxMjYzWXpncmZHMWNXclR4QmZIY1RkVzhHa1IvWjhOdHU2U2UzagorVG9PQVBCL1IzZEdUN0hveWJVQjdQYkc2ZituajZ1cTBjTWpYSFFwYXZYNTBZbHlObmcxUm9jVlpnVnMveTVWCjdobVlVVzJnSFNTTDhFb3VVWWVkVUpzL3RaSHMzUXUyVFJQODNQblpsbDVVK1NDelZMdUZ2K3BmYktqcnhTTloKejl6UzZzVmx6cExBbGJHRlB4YTZ0TlI4SExhOQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg=="' + '[' -z 
'"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURvVENDQW9tZ0F3SUJBZ0lVYjA4b3Fjd0hjSHJYVWk1UmF0Vi8zSlhORHRZd0RRWUpLb1pJaHZjTkFRRUwKQlFBd0lURWZNQjBHQTFVRUF4TVdjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjFaUzFqWVRBZUZ3MHlOakExTVRjdwpNakExTURkYUZ3MHlOakEzTURReE1EQTFNRGRhTUNjeEpUQWpCZ05WQkFNVEhITnZiV1V0Ym1GdFpTMTBiSE10CmFYTnpkV1V0Y0hKdmVIbHpjV3d3Z2dFaU1BMEdDU3FHU0liM0RRRUJBUVVBQTRJQkR3QXdnZ0VLQW9JQkFRRGcKVlRDeG9GR1ZobEY2K3JiTkVydUw2bXA2aFJ0QmJUNTJaa0ZrSlRoRnhVVEZHWXVpb2hudVhhQkJvaktyUDY1dApqTXRqSmgrMFBXVDNaREdmalVzTlNMa3R1bnBFbG5lUlVYNjVrblozRXZ0RHVJYktMWXQwcFU4M2pFaDNTdnF5CjliaUIzREFSV09Bbkx6L09xSDRwMUsrZjdSWElHZ3JwekNZNDYvTFVjU3pDVTFTam5LdzN0dUxjRmpPeVJPcCsKeVI1YnQ3dGk2T0FBVjhxTFFvVTNzM2NJcW1udk1ad1RESlNNMWg2K1gxVFdNVlVwTW9uRTVZRHJlZTdDcjFuVApSU1p2MVVPRmVTbUQ1Zm05R0wxZ20yNXA1T3REazVLNjNGRFZ4TXE0N1ZuZksxVDN3S3pGQ3M1N0I5b01mdlJpCmZ0RjhwaWxtVVkxNmdsRTkyTGtCQWdNQkFBR2pnY293Z2Njd0RnWURWUjBQQVFIL0JBUURBZ1dnTUF3R0ExVWQKRXdFQi93UUNNQUF3SHdZRFZSMGpCQmd3Rm9BVTVOUTcrWlVBWXlzcXo1YXZPRHBoNUpWeUFJb3dnWVVHQTFVZApFUVIrTUh5Q0YzTnZiV1V0Ym1GdFpTMTBiSE10YVhOemRXVXRjSGhqZ2h4emIyMWxMVzVoYldVdGRHeHpMV2x6CmMzVmxMWEJ5YjNoNWMzRnNnaGtxTG5OdmJXVXRibUZ0WlMxMGJITXRhWE56ZFdVdGNIaGpnaDRxTG5OdmJXVXQKYm1GdFpTMTBiSE10YVhOemRXVXRjSEp2ZUhsemNXeUNDSFJsYzNRdVkyOXRNQTBHQ1NxR1NJYjNEUUVCQ3dVQQpBNElCQVFBaG9talRIOGIyNHpqQ0xFR1RHNU1GYlhrNXg0RjM1WHJyTTlXZ3JSL2ZKTXc4SnNIRFZyUzVCdUc0CnV4eXpFckV5cDZCSnZBeXhvbHdkN0k4KzdnL0x2Z2t5UDh0a3laQVBpQXk3aFFVdTRRUXEvNmlTSHlDWXBFK3QKVDlvUFkvVTJBN0J2bmpzaHB0blF1WjVEMzcxMjYzWXpncmZHMWNXclR4QmZIY1RkVzhHa1IvWjhOdHU2U2UzagorVG9PQVBCL1IzZEdUN0hveWJVQjdQYkc2ZituajZ1cTBjTWpYSFFwYXZYNTBZbHlObmcxUm9jVlpnVnMveTVWCjdobVlVVzJnSFNTTDhFb3VVWWVkVUpzL3RaSHMzUXUyVFJQODNQblpsbDVVK1NDelZMdUZ2K3BmYktqcnhTTloKejl6UzZzVmx6cExBbGJHRlB4YTZ0TlI4SExhOQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg=="' ']' + checkTLSSecret some-name-tls-issue-ssl tls.key + local secretName=some-name-tls-issue-ssl + local dataKey=tls.key ++ kubectl_bin get secrets/some-name-tls-issue-ssl -o json ++ jq '.data["tls.key"]' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4L0Yl7ZLux +++ mktemp ++ local LAST_ERR=/tmp/tmp.xuOgxm7LQi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/some-name-tls-issue-ssl -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4L0Yl7ZLux ++ cat /tmp/tmp.xuOgxm7LQi ++ rm /tmp/tmp.4L0Yl7ZLux /tmp/tmp.xuOgxm7LQi ++ return 0 + local 
'secretData="LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlFb2dJQkFBS0NBUUVBNEZVd3NhQlJsWVpSZXZxMnpSSzdpK3BxZW9VYlFXMCtkbVpCWkNVNFJjVkV4Um1MCm9xSVo3bDJnUWFJeXF6K3ViWXpMWXlZZnREMWs5MlF4bjQxTERVaTVMYnA2UkpaM2tWRit1WkoyZHhMN1E3aUcKeWkyTGRLVlBONHhJZDByNnN2VzRnZHd3RVZqZ0p5OC96cWgrS2RTdm4rMFZ5Qm9LNmN3bU9PdnkxSEVzd2xOVQpvNXlzTjdiaTNCWXpza1RxZnNrZVc3ZTdZdWpnQUZmS2kwS0ZON04zQ0twcDd6R2NFd3lVak5ZZXZsOVUxakZWCktUS0p4T1dBNjNudXdxOVowMFVtYjlWRGhYa3BnK1g1dlJpOVlKdHVhZVRyUTVPU3V0eFExY1RLdU8xWjN5dFUKOThDc3hRck9ld2ZhREg3MFluN1JmS1lwWmxHTmVvSlJQZGk1QVFJREFRQUJBb0lCQUFUOVZXcnQvTzA0SXBjbAovUlkzUk1mMDVub09mSVVLV3Fta0srOS85Y04wMkNCbUtXUHNUOEFCMXhtcFNPVTVoN1gvWnJBRmlMajlPMElCClpDcC9Vd2NGbkx0MnBWZU1oK0Qwa0w2c2trbE51UnhRS0xkVjBmODkyZ2cxN09WZEJwSEtiZ01vRkVMejJNWFIKQXVOTzAyemlGOUhxZDdjNVZSQWtMcDdLS0xRdTV1aUpFVHBGRU42bzAzemVSaGZYMFhSK25lWGlzOFlGZ29lRwo0VUVPV0tRc3lDYUZNTkw1dmh6OGFIdjMySi9kRkM2QXB0YS9yTGoxdmN5ck5KOFpCWHlIbEtUZ3N3Y011R2g4CmpDblF5QWtlM1VuWFdDalEzb2pRSHdiUzNZRFNxVVpwQkRpRkt0SC9renhhcm5FTXZtc3gzQnhrWUl4UXZyMmQKZmdmL3VoMENnWUVBOElhMzR6emlBb0JldWcxbzVvOWtnR0ptWEFqNEpHb1lpRStzZFFIWXM2Nk9qNE92MG1TdQp5czljY3R0c3hBSkJLRGVFSlJibjB4Ry9IdFhzdXRhTitEeUtpT3k5MGNzWm03Qm9TeXBsUWZ6eU84YmdoMCsyCjh4cHJHc3FHZlRVNDJCUzdHQjNNdTcyMjFpYW0vVVJrUnFKUU00SVduMjd6ZC9hS3c0YXkyTDhDZ1lFQTdzUEgKSi8xNnRpelV4SGFPVndKWDV5aDl2SEIvdVFoay82Tk8vcXhRd2U1SmpXNGxnQ2RySnJrMGxnR3IvTFhZK0lpRQp5d1NQeEpSa21KS0pLOHBiTy9lYVJTYlVRWG5KeUwyYXVLTUxNOTZPWDJueUVTVzcxbXNjLzZ0QlNoMkZxQzVnCkZpMjNMTFllNUFjOFhtRlZCVGpKU1pPdFNjejlxRGJ4MGZKUkhqOENnWUJ4L1RhYXo5akFuTHZINnVFREc0UGkKd0QrSWNJdkJIakxnTHZvcFBhWElpZnB3RTFXNUhSUWxjNTU1cEhub2JyVVliUzhJTU5IUEd3TDdoeUM0NzBPNwpHRjREUkVjMWJNNkhGVlJvQXdDWUJGdDhidVVyMjJCMTE5aFlpeEN2dUJCbmYzT25tei9TM2JjUHlLT3ROY05jCklrSm1Gc25INTFvbFNVYzhJTk1UbXdLQmdFcUtQR2F6cjllbjcxVFdYbUd6alpYUkFsQU9IeEc1R1M3bDRRZUkKN0QxZkV2OWdYN3kra2FrcE1NMlVBQW1KREErckx5bmlacS9NVkF0OTArMGo0NCs1SVNSdGx0NUtFMERrVUtWTAo0WGQwQjZhcUFMUnh2MDZWYWh3M2JzNmdvTlY4aE9ybm9zUUdwMll4REJ1Njd6d09HbWtwL1hPSTE3Zm9UMTVuCjlBTXBBb0dBTkZvRGF0VEFqR3hUU1pKbnNDN1kzdWhEVllsOEVMN2hxQndpcmNITjNqNExjK25FcDlReWJHbloKNzJXQXIxL08xU2JlWmNCaHFIcEFkT1l1S056RXpRRzNGY3plRnhRMGR5NWpQaWF6eGd4M2ZkbkdybUQyU0I3VwpleVlXaEpPc0lWWVZQTjRqcHgwQTZ6NVdaUEVDRndseUtmY1RkaHFVbnRBdHhaNnhsOTQ9Ci0tLS0tRU5EIFJTQSBQUklWQVRFIEtFWS0tLS0tCg=="' + '[' -z 
'"LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlFb2dJQkFBS0NBUUVBNEZVd3NhQlJsWVpSZXZxMnpSSzdpK3BxZW9VYlFXMCtkbVpCWkNVNFJjVkV4Um1MCm9xSVo3bDJnUWFJeXF6K3ViWXpMWXlZZnREMWs5MlF4bjQxTERVaTVMYnA2UkpaM2tWRit1WkoyZHhMN1E3aUcKeWkyTGRLVlBONHhJZDByNnN2VzRnZHd3RVZqZ0p5OC96cWgrS2RTdm4rMFZ5Qm9LNmN3bU9PdnkxSEVzd2xOVQpvNXlzTjdiaTNCWXpza1RxZnNrZVc3ZTdZdWpnQUZmS2kwS0ZON04zQ0twcDd6R2NFd3lVak5ZZXZsOVUxakZWCktUS0p4T1dBNjNudXdxOVowMFVtYjlWRGhYa3BnK1g1dlJpOVlKdHVhZVRyUTVPU3V0eFExY1RLdU8xWjN5dFUKOThDc3hRck9ld2ZhREg3MFluN1JmS1lwWmxHTmVvSlJQZGk1QVFJREFRQUJBb0lCQUFUOVZXcnQvTzA0SXBjbAovUlkzUk1mMDVub09mSVVLV3Fta0srOS85Y04wMkNCbUtXUHNUOEFCMXhtcFNPVTVoN1gvWnJBRmlMajlPMElCClpDcC9Vd2NGbkx0MnBWZU1oK0Qwa0w2c2trbE51UnhRS0xkVjBmODkyZ2cxN09WZEJwSEtiZ01vRkVMejJNWFIKQXVOTzAyemlGOUhxZDdjNVZSQWtMcDdLS0xRdTV1aUpFVHBGRU42bzAzemVSaGZYMFhSK25lWGlzOFlGZ29lRwo0VUVPV0tRc3lDYUZNTkw1dmh6OGFIdjMySi9kRkM2QXB0YS9yTGoxdmN5ck5KOFpCWHlIbEtUZ3N3Y011R2g4CmpDblF5QWtlM1VuWFdDalEzb2pRSHdiUzNZRFNxVVpwQkRpRkt0SC9renhhcm5FTXZtc3gzQnhrWUl4UXZyMmQKZmdmL3VoMENnWUVBOElhMzR6emlBb0JldWcxbzVvOWtnR0ptWEFqNEpHb1lpRStzZFFIWXM2Nk9qNE92MG1TdQp5czljY3R0c3hBSkJLRGVFSlJibjB4Ry9IdFhzdXRhTitEeUtpT3k5MGNzWm03Qm9TeXBsUWZ6eU84YmdoMCsyCjh4cHJHc3FHZlRVNDJCUzdHQjNNdTcyMjFpYW0vVVJrUnFKUU00SVduMjd6ZC9hS3c0YXkyTDhDZ1lFQTdzUEgKSi8xNnRpelV4SGFPVndKWDV5aDl2SEIvdVFoay82Tk8vcXhRd2U1SmpXNGxnQ2RySnJrMGxnR3IvTFhZK0lpRQp5d1NQeEpSa21KS0pLOHBiTy9lYVJTYlVRWG5KeUwyYXVLTUxNOTZPWDJueUVTVzcxbXNjLzZ0QlNoMkZxQzVnCkZpMjNMTFllNUFjOFhtRlZCVGpKU1pPdFNjejlxRGJ4MGZKUkhqOENnWUJ4L1RhYXo5akFuTHZINnVFREc0UGkKd0QrSWNJdkJIakxnTHZvcFBhWElpZnB3RTFXNUhSUWxjNTU1cEhub2JyVVliUzhJTU5IUEd3TDdoeUM0NzBPNwpHRjREUkVjMWJNNkhGVlJvQXdDWUJGdDhidVVyMjJCMTE5aFlpeEN2dUJCbmYzT25tei9TM2JjUHlLT3ROY05jCklrSm1Gc25INTFvbFNVYzhJTk1UbXdLQmdFcUtQR2F6cjllbjcxVFdYbUd6alpYUkFsQU9IeEc1R1M3bDRRZUkKN0QxZkV2OWdYN3kra2FrcE1NMlVBQW1KREErckx5bmlacS9NVkF0OTArMGo0NCs1SVNSdGx0NUtFMERrVUtWTAo0WGQwQjZhcUFMUnh2MDZWYWh3M2JzNmdvTlY4aE9ybm9zUUdwMll4REJ1Njd6d09HbWtwL1hPSTE3Zm9UMTVuCjlBTXBBb0dBTkZvRGF0VEFqR3hUU1pKbnNDN1kzdWhEVllsOEVMN2hxQndpcmNITjNqNExjK25FcDlReWJHbloKNzJXQXIxL08xU2JlWmNCaHFIcEFkT1l1S056RXpRRzNGY3plRnhRMGR5NWpQaWF6eGd4M2ZkbkdybUQyU0I3VwpleVlXaEpPc0lWWVZQTjRqcHgwQTZ6NVdaUEVDRndseUtmY1RkaHFVbnRBdHhaNnhsOTQ9Ci0tLS0tRU5EIFJTQSBQUklWQVRFIEtFWS0tLS0tCg=="' ']' + desc 'check if CA issuer created' + set +o xtrace ----------------------------------------------------------------------------------- check if CA issuer created ----------------------------------------------------------------------------------- + compare_kubectl issuer/some-name-tls-issue-pxc-ca-issuer + local resource=issuer/some-name-tls-issue-pxc-ca-issuer + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer.yml + local new_result=/tmp/tmp.bIebaWzBVY/issuer_some-name-tls-issue-pxc-ca-issuer.yml + desc 'compare issuer/some-name-tls-issue-pxc-ca-issuer-' + set +o xtrace ----------------------------------------------------------------------------------- compare issuer/some-name-tls-issue-pxc-ca-issuer- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-80.yml ']' + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 ++ echo '1.33 >= 1.33' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-k133.yml ']' + version_gt 1.29 ++ echo '1.33 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-k129.yml ']' + version_gt 1.27 ++ bc -l ++ echo '1.33 >= 1.27' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-k127.yml ']' + version_gt 1.24 ++ echo '1.33 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-k124.yml ']' + version_gt 1.22 ++ bc -l ++ echo '1.33 >= 1.22' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-k122.yml ']' + version_gt 1.21 ++ echo '1.33 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-oc.yml ']' + version_gt 1.29 ++ echo '1.33 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-aks.yml ']' + kubectl_bin get -o yaml issuer/some-name-tls-issue-pxc-ca-issuer ++ mktemp + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. 
| select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.spec.template.spec.containers[].env[] | select(.name == "XTRABACKUP_ENABLED")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.metadata.annotations."kubernetes.digitalocean.com/load-balancer-id") | del(.metadata.annotations."service.beta.kubernetes.io/do-loadbalancer-type") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. 
| select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | del(.spec.updateStrategy.rollingUpdate.maxUnavailable) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-1530", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + local LAST_OUT=/tmp/tmp.977qEwqarB ++ mktemp + local LAST_ERR=/tmp/tmp.ZIXLA38RaT + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml issuer/some-name-tls-issue-pxc-ca-issuer + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.977qEwqarB + cat /tmp/tmp.ZIXLA38RaT + rm /tmp/tmp.977qEwqarB /tmp/tmp.ZIXLA38RaT + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer.yml /tmp/tmp.bIebaWzBVY/issuer_some-name-tls-issue-pxc-ca-issuer.yml + log 'compare_kubectl: issuer/some-name-tls-issue-pxc-ca-issuer OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-05-17T02:08:57+0000]' compare_kubectl: issuer/some-name-tls-issue-pxc-ca-issuer OK [2026-05-17T02:08:57+0000] compare_kubectl: issuer/some-name-tls-issue-pxc-ca-issuer OK + desc 'check if issuer created' + set +o xtrace ----------------------------------------------------------------------------------- check if issuer created ----------------------------------------------------------------------------------- + compare_kubectl issuer/some-name-tls-issue-pxc-issuer + local resource=issuer/some-name-tls-issue-pxc-issuer + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer.yml + local new_result=/tmp/tmp.bIebaWzBVY/issuer_some-name-tls-issue-pxc-issuer.yml + desc 'compare issuer/some-name-tls-issue-pxc-issuer-' + set +o xtrace ----------------------------------------------------------------------------------- compare issuer/some-name-tls-issue-pxc-issuer- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 ++ echo '1.33 >= 1.33' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-k133.yml ']' + version_gt 1.29 ++ echo '1.33 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-k129.yml ']' + version_gt 1.27 ++ echo '1.33 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-k127.yml ']' + 
version_gt 1.24 ++ echo '1.33 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-k124.yml ']' + version_gt 1.22 ++ echo '1.33 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-k122.yml ']' + version_gt 1.21 ++ echo '1.33 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-oc.yml ']' + version_gt 1.29 ++ echo '1.33 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-aks.yml ']' + kubectl_bin get -o yaml issuer/some-name-tls-issue-pxc-issuer ++ mktemp + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.spec.template.spec.containers[].env[] | select(.name == "XTRABACKUP_ENABLED")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.metadata.annotations."kubernetes.digitalocean.com/load-balancer-id") | del(.metadata.annotations."service.beta.kubernetes.io/do-loadbalancer-type") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. 
| select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | del(.spec.updateStrategy.rollingUpdate.maxUnavailable) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-1530", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. 
| select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + local LAST_OUT=/tmp/tmp.FQBupVBDD7 ++ mktemp + local LAST_ERR=/tmp/tmp.bkKudDDqcE + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml issuer/some-name-tls-issue-pxc-issuer + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.FQBupVBDD7 + cat /tmp/tmp.bkKudDDqcE + rm /tmp/tmp.FQBupVBDD7 /tmp/tmp.bkKudDDqcE + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer.yml /tmp/tmp.bIebaWzBVY/issuer_some-name-tls-issue-pxc-issuer.yml + log 'compare_kubectl: issuer/some-name-tls-issue-pxc-issuer OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-05-17T02:08:58+0000]' compare_kubectl: issuer/some-name-tls-issue-pxc-issuer OK [2026-05-17T02:08:58+0000] compare_kubectl: issuer/some-name-tls-issue-pxc-issuer OK + desc 'check if certificate issued' + set +o xtrace ----------------------------------------------------------------------------------- check if certificate issued ----------------------------------------------------------------------------------- + compare_kubectl certificate/some-name-tls-issue-ssl + local resource=certificate/some-name-tls-issue-ssl + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl.yml + local new_result=/tmp/tmp.bIebaWzBVY/certificate_some-name-tls-issue-ssl.yml + desc 'compare certificate/some-name-tls-issue-ssl-' + set +o xtrace ----------------------------------------------------------------------------------- compare certificate/some-name-tls-issue-ssl- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 ++ echo '1.33 >= 1.33' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-k133.yml ']' + version_gt 1.29 ++ echo '1.33 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-k129.yml ']' + version_gt 1.27 ++ echo '1.33 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-k127.yml ']' + version_gt 1.24 ++ echo '1.33 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-k124.yml ']' + version_gt 1.22 ++ echo '1.33 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-k122.yml ']' + version_gt 1.21 ++ echo '1.33 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f 
/mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-oc.yml ']' + version_gt 1.29 ++ echo '1.33 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-aks.yml ']' + kubectl_bin get -o yaml certificate/some-name-tls-issue-ssl ++ mktemp + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.spec.template.spec.containers[].env[] | select(.name == "XTRABACKUP_ENABLED")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.metadata.annotations."kubernetes.digitalocean.com/load-balancer-id") | del(.metadata.annotations."service.beta.kubernetes.io/do-loadbalancer-type") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. 
| select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | del(.spec.updateStrategy.rollingUpdate.maxUnavailable) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-1530", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + local LAST_OUT=/tmp/tmp.B75GiELaRB ++ mktemp + local LAST_ERR=/tmp/tmp.aumljCSYVg + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml certificate/some-name-tls-issue-ssl + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.B75GiELaRB + cat /tmp/tmp.aumljCSYVg + rm /tmp/tmp.B75GiELaRB /tmp/tmp.aumljCSYVg + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2473/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl.yml /tmp/tmp.bIebaWzBVY/certificate_some-name-tls-issue-ssl.yml + log 'compare_kubectl: certificate/some-name-tls-issue-ssl OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-05-17T02:08:59+0000]' compare_kubectl: certificate/some-name-tls-issue-ssl OK [2026-05-17T02:08:59+0000] compare_kubectl: certificate/some-name-tls-issue-ssl OK + desc 'check ssl-internal certificate using PXC' + set +o xtrace ----------------------------------------------------------------------------------- check ssl-internal certificate using PXC ----------------------------------------------------------------------------------- + check_verify_identity some-name-tls-issue-pxc + local host=some-name-tls-issue-pxc + local command=exit + local 'args=--ssl-ca=/etc/mysql/ssl-internal/ca.crt --ssl-mode=VERIFY_IDENTITY --protocol=tcp -uroot -proot_password --host=some-name-tls-issue-pxc' + kubectl_bin exec some-name-tls-issue-pxc-0 -- bash -c 'printf '\''%s\n'\'' "exit" | mysql -sN --ssl-ca=/etc/mysql/ssl-internal/ca.crt --ssl-mode=VERIFY_IDENTITY --protocol=tcp -uroot -proot_password --host=some-name-tls-issue-pxc' ++ mktemp + local LAST_OUT=/tmp/tmp.zQQGZQrmqW ++ mktemp + local LAST_ERR=/tmp/tmp.wXEP2pBK3U + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec some-name-tls-issue-pxc-0 -- bash -c 'printf '\''%s\n'\'' "exit" | mysql -sN --ssl-ca=/etc/mysql/ssl-internal/ca.crt --ssl-mode=VERIFY_IDENTITY 
--protocol=tcp -uroot -proot_password --host=some-name-tls-issue-pxc' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.zQQGZQrmqW + cat /tmp/tmp.wXEP2pBK3U mysql: [Warning] Using a password on the command line interface can be insecure. + rm /tmp/tmp.zQQGZQrmqW /tmp/tmp.wXEP2pBK3U + return 0 + desc 'check ssl-internal certificate using HAProxy' + set +o xtrace ----------------------------------------------------------------------------------- check ssl-internal certificate using HAProxy ----------------------------------------------------------------------------------- + check_verify_identity some-name-tls-issue-haproxy + local host=some-name-tls-issue-haproxy + local command=exit + local 'args=--ssl-ca=/etc/mysql/ssl-internal/ca.crt --ssl-mode=VERIFY_IDENTITY --protocol=tcp -uroot -proot_password --host=some-name-tls-issue-haproxy' + kubectl_bin exec some-name-tls-issue-pxc-0 -- bash -c 'printf '\''%s\n'\'' "exit" | mysql -sN --ssl-ca=/etc/mysql/ssl-internal/ca.crt --ssl-mode=VERIFY_IDENTITY --protocol=tcp -uroot -proot_password --host=some-name-tls-issue-haproxy' ++ mktemp + local LAST_OUT=/tmp/tmp.k3OymikTNy ++ mktemp + local LAST_ERR=/tmp/tmp.qni4tY1Pkk + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec some-name-tls-issue-pxc-0 -- bash -c 'printf '\''%s\n'\'' "exit" | mysql -sN --ssl-ca=/etc/mysql/ssl-internal/ca.crt --ssl-mode=VERIFY_IDENTITY --protocol=tcp -uroot -proot_password --host=some-name-tls-issue-haproxy' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.k3OymikTNy + cat /tmp/tmp.qni4tY1Pkk mysql: [Warning] Using a password on the command line interface can be insecure. + rm /tmp/tmp.k3OymikTNy /tmp/tmp.qni4tY1Pkk + return 0 + desc 'trigger CA rotation and verify leaf cert re-issuance' + set +o xtrace ----------------------------------------------------------------------------------- trigger CA rotation and verify leaf cert re-issuance ----------------------------------------------------------------------------------- + trigger_ca_rotation + desc 'set rotationPolicy=Always on CA certificate' + set +o xtrace ----------------------------------------------------------------------------------- set rotationPolicy=Always on CA certificate ----------------------------------------------------------------------------------- + kubectl_bin patch certificate some-name-tls-issue-ca-cert --type=merge -p '{"spec":{"privateKey":{"rotationPolicy":"Always"}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.bvlsEyb1ak ++ mktemp + local LAST_ERR=/tmp/tmp.nH1Wl8LSL2 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch certificate some-name-tls-issue-ca-cert --type=merge -p '{"spec":{"privateKey":{"rotationPolicy":"Always"}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.bvlsEyb1ak certificate.cert-manager.io/some-name-tls-issue-ca-cert patched + cat /tmp/tmp.nH1Wl8LSL2 + rm /tmp/tmp.bvlsEyb1ak /tmp/tmp.nH1Wl8LSL2 + return 0 + desc 'capture current CA fingerprint' + set +o xtrace ----------------------------------------------------------------------------------- capture current CA fingerprint ----------------------------------------------------------------------------------- + local old_ca_md5 ++ openssl x509 -noout -fingerprint -sha256 ++ kubectl_bin get secret some-name-tls-issue-ca-cert -o 'jsonpath={.data.tls\.crt}' ++ base64 -d +++ mktemp ++ local LAST_OUT=/tmp/tmp.AOG5ybD8li +++ mktemp ++ local LAST_ERR=/tmp/tmp.AQxAnuCveH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ 
set +e ++ kubectl get secret some-name-tls-issue-ca-cert -o 'jsonpath={.data.tls\.crt}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AOG5ybD8li ++ cat /tmp/tmp.AQxAnuCveH ++ rm /tmp/tmp.AOG5ybD8li /tmp/tmp.AQxAnuCveH ++ return 0 + old_ca_md5='SHA256 Fingerprint=A9:F3:40:28:E0:45:E8:A4:A0:48:8C:1F:60:C1:21:30:1D:A3:8D:11:17:F1:39:C6:A3:F3:7E:77:4F:5D:4B:D1' + echo 'old CA: SHA256 Fingerprint=A9:F3:40:28:E0:45:E8:A4:A0:48:8C:1F:60:C1:21:30:1D:A3:8D:11:17:F1:39:C6:A3:F3:7E:77:4F:5D:4B:D1' old CA: SHA256 Fingerprint=A9:F3:40:28:E0:45:E8:A4:A0:48:8C:1F:60:C1:21:30:1D:A3:8D:11:17:F1:39:C6:A3:F3:7E:77:4F:5D:4B:D1 + desc 'trigger CA renewal via status condition patch' + set +o xtrace ----------------------------------------------------------------------------------- trigger CA renewal via status condition patch ----------------------------------------------------------------------------------- + local now ++ date -u +%Y-%m-%dT%H:%M:%SZ + now=2026-05-17T02:09:06Z + jq --arg now 2026-05-17T02:09:06Z '.status.conditions = [{"type":"Issuing","status":"True","reason":"ManuallyTriggered","message":"Certificate renewal triggered","lastTransitionTime":$now}]' + kubectl_bin get certificate some-name-tls-issue-ca-cert -o json + kubectl_bin replace --subresource=status -f - ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.NzYp2V47hH + local LAST_OUT=/tmp/tmp.xI2yLb1YCg ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.LE4Dt2Xz0d + local exit_status=0 + local LAST_ERR=/tmp/tmp.u0Avw5E9FE + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get certificate some-name-tls-issue-ca-cert -o json + for i in '$(seq 0 2)' + set +e + kubectl replace --subresource=status -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.NzYp2V47hH + cat /tmp/tmp.LE4Dt2Xz0d + rm /tmp/tmp.NzYp2V47hH /tmp/tmp.LE4Dt2Xz0d + return 0 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.xI2yLb1YCg certificate.cert-manager.io/some-name-tls-issue-ca-cert replaced + cat /tmp/tmp.u0Avw5E9FE + rm /tmp/tmp.xI2yLb1YCg /tmp/tmp.u0Avw5E9FE + return 0 + desc 'wait for cert-manager to issue new CA' + set +o xtrace ----------------------------------------------------------------------------------- wait for cert-manager to issue new CA ----------------------------------------------------------------------------------- + local retries=30 + local new_ca_md5= ++ seq 1 30 + for _ in '$(seq 1 $retries)' ++ kubectl_bin get secret some-name-tls-issue-ca-cert -o 'jsonpath={.data.tls\.crt}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4XgQRCOiHK +++ mktemp ++ base64 -d ++ local LAST_ERR=/tmp/tmp.ItQfkTaimi ++ local exit_status=0 ++ openssl x509 -noout -fingerprint -sha256 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secret some-name-tls-issue-ca-cert -o 'jsonpath={.data.tls\.crt}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4XgQRCOiHK ++ cat /tmp/tmp.ItQfkTaimi ++ rm /tmp/tmp.4XgQRCOiHK /tmp/tmp.ItQfkTaimi ++ return 0 + new_ca_md5='SHA256 Fingerprint=6B:C4:1F:D0:48:DA:32:1C:A5:CB:0F:C5:C5:F9:E7:A3:D3:DD:9C:CA:DD:41:6B:AC:55:51:3F:8A:D3:23:0B:D1' + '[' 'SHA256 Fingerprint=6B:C4:1F:D0:48:DA:32:1C:A5:CB:0F:C5:C5:F9:E7:A3:D3:DD:9C:CA:DD:41:6B:AC:55:51:3F:8A:D3:23:0B:D1' '!=' 'SHA256 Fingerprint=A9:F3:40:28:E0:45:E8:A4:A0:48:8C:1F:60:C1:21:30:1D:A3:8D:11:17:F1:39:C6:A3:F3:7E:77:4F:5D:4B:D1' ']' + echo 'new CA: SHA256 Fingerprint=6B:C4:1F:D0:48:DA:32:1C:A5:CB:0F:C5:C5:F9:E7:A3:D3:DD:9C:CA:DD:41:6B:AC:55:51:3F:8A:D3:23:0B:D1' new CA: SHA256 
Fingerprint=6B:C4:1F:D0:48:DA:32:1C:A5:CB:0F:C5:C5:F9:E7:A3:D3:DD:9C:CA:DD:41:6B:AC:55:51:3F:8A:D3:23:0B:D1 + break + '[' 'SHA256 Fingerprint=6B:C4:1F:D0:48:DA:32:1C:A5:CB:0F:C5:C5:F9:E7:A3:D3:DD:9C:CA:DD:41:6B:AC:55:51:3F:8A:D3:23:0B:D1' == 'SHA256 Fingerprint=A9:F3:40:28:E0:45:E8:A4:A0:48:8C:1F:60:C1:21:30:1D:A3:8D:11:17:F1:39:C6:A3:F3:7E:77:4F:5D:4B:D1' ']' + desc 'trigger operator reconcile to detect CA mismatch' + set +o xtrace ----------------------------------------------------------------------------------- trigger operator reconcile to detect CA mismatch ----------------------------------------------------------------------------------- ++ date -u +%Y-%m-%dT%H:%M:%SZ + kubectl_bin annotate pxc some-name-tls-issue percona.com/ca-rotated-at=2026-05-17T02:09:11Z --overwrite ++ mktemp + local LAST_OUT=/tmp/tmp.6FdJLWoxVe ++ mktemp + local LAST_ERR=/tmp/tmp.cuX2TrzFcG + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl annotate pxc some-name-tls-issue percona.com/ca-rotated-at=2026-05-17T02:09:11Z --overwrite + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.6FdJLWoxVe perconaxtradbcluster.pxc.percona.com/some-name-tls-issue annotated + cat /tmp/tmp.cuX2TrzFcG + rm /tmp/tmp.6FdJLWoxVe /tmp/tmp.cuX2TrzFcG + return 0 + desc 'wait for operator to re-issue leaf certs with new CA' + set +o xtrace ----------------------------------------------------------------------------------- wait for operator to re-issue leaf certs with new CA ----------------------------------------------------------------------------------- + local leaf_retries=60 + local leaf_ca_md5= + local leaf_internal_ca_md5= ++ seq 1 60 + for _ in '$(seq 1 $leaf_retries)' ++ kubectl_bin get secret some-name-tls-issue-ssl -o 'jsonpath={.data.ca\.crt}' ++ openssl x509 -noout -fingerprint -sha256 ++ base64 -d + leaf_ca_md5='SHA256 Fingerprint=6B:C4:1F:D0:48:DA:32:1C:A5:CB:0F:C5:C5:F9:E7:A3:D3:DD:9C:CA:DD:41:6B:AC:55:51:3F:8A:D3:23:0B:D1' ++ kubectl_bin get secret some-name-tls-issue-ssl-internal -o 'jsonpath={.data.ca\.crt}' ++ base64 -d ++ openssl x509 -noout -fingerprint -sha256 + leaf_internal_ca_md5='SHA256 Fingerprint=6B:C4:1F:D0:48:DA:32:1C:A5:CB:0F:C5:C5:F9:E7:A3:D3:DD:9C:CA:DD:41:6B:AC:55:51:3F:8A:D3:23:0B:D1' + '[' 'SHA256 Fingerprint=6B:C4:1F:D0:48:DA:32:1C:A5:CB:0F:C5:C5:F9:E7:A3:D3:DD:9C:CA:DD:41:6B:AC:55:51:3F:8A:D3:23:0B:D1' == 'SHA256 Fingerprint=6B:C4:1F:D0:48:DA:32:1C:A5:CB:0F:C5:C5:F9:E7:A3:D3:DD:9C:CA:DD:41:6B:AC:55:51:3F:8A:D3:23:0B:D1' ']' + '[' 'SHA256 Fingerprint=6B:C4:1F:D0:48:DA:32:1C:A5:CB:0F:C5:C5:F9:E7:A3:D3:DD:9C:CA:DD:41:6B:AC:55:51:3F:8A:D3:23:0B:D1' == 'SHA256 Fingerprint=6B:C4:1F:D0:48:DA:32:1C:A5:CB:0F:C5:C5:F9:E7:A3:D3:DD:9C:CA:DD:41:6B:AC:55:51:3F:8A:D3:23:0B:D1' ']' + break + desc 'verify leaf secrets have new CA' + set +o xtrace ----------------------------------------------------------------------------------- verify leaf secrets have new CA ----------------------------------------------------------------------------------- + '[' 'SHA256 Fingerprint=6B:C4:1F:D0:48:DA:32:1C:A5:CB:0F:C5:C5:F9:E7:A3:D3:DD:9C:CA:DD:41:6B:AC:55:51:3F:8A:D3:23:0B:D1' '!=' 'SHA256 Fingerprint=6B:C4:1F:D0:48:DA:32:1C:A5:CB:0F:C5:C5:F9:E7:A3:D3:DD:9C:CA:DD:41:6B:AC:55:51:3F:8A:D3:23:0B:D1' ']' + '[' 'SHA256 Fingerprint=6B:C4:1F:D0:48:DA:32:1C:A5:CB:0F:C5:C5:F9:E7:A3:D3:DD:9C:CA:DD:41:6B:AC:55:51:3F:8A:D3:23:0B:D1' '!=' 'SHA256 Fingerprint=6B:C4:1F:D0:48:DA:32:1C:A5:CB:0F:C5:C5:F9:E7:A3:D3:DD:9C:CA:DD:41:6B:AC:55:51:3F:8A:D3:23:0B:D1' ']' + echo 'leaf certs re-issued with new CA' 
leaf certs re-issued with new CA + desc 'restart all PXC and HAProxy pods to pick up new certs simultaneously' + set +o xtrace ----------------------------------------------------------------------------------- restart all PXC and HAProxy pods to pick up new certs simultaneously ----------------------------------------------------------------------------------- + sleep 30 + kubectl_bin delete pods -l app.kubernetes.io/instance=some-name-tls-issue,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --force --grace-period=0 ++ mktemp + local LAST_OUT=/tmp/tmp.XvQz4c2QVF ++ mktemp + local LAST_ERR=/tmp/tmp.WxS0x1AMQU + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pods -l app.kubernetes.io/instance=some-name-tls-issue,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --force --grace-period=0 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.XvQz4c2QVF pod "some-name-tls-issue-haproxy-0" force deleted from tls-issue-cert-manager-1530 namespace pod "some-name-tls-issue-haproxy-1" force deleted from tls-issue-cert-manager-1530 namespace pod "some-name-tls-issue-pxc-0" force deleted from tls-issue-cert-manager-1530 namespace pod "some-name-tls-issue-pxc-1" force deleted from tls-issue-cert-manager-1530 namespace pod "some-name-tls-issue-pxc-2" force deleted from tls-issue-cert-manager-1530 namespace + cat /tmp/tmp.WxS0x1AMQU Warning: Immediate deletion does not wait for confirmation that the running resource has been terminated. The resource may continue to run on the cluster indefinitely. + rm /tmp/tmp.XvQz4c2QVF /tmp/tmp.WxS0x1AMQU + return 0 + desc 'wait for cluster to recover after full restart' + set +o xtrace ----------------------------------------------------------------------------------- wait for cluster to recover after full restart ----------------------------------------------------------------------------------- + wait_for_running some-name-tls-issue-haproxy 1 + local name=some-name-tls-issue-haproxy + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-tls-issue-haproxy-0 480 + local pod=some-name-tls-issue-haproxy-0 + local max_retry=480 + local ns= ++ echo some-name-tls-issue-haproxy-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/some-name-tls-issue-haproxy-0 condition met waiting for pod/some-name-tls-issue-haproxy-0 to become Ready.Ok + wait_for_running some-name-tls-issue-pxc 3 + local name=some-name-tls-issue-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-tls-issue-pxc-0 480 + local pod=some-name-tls-issue-pxc-0 + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo some-name-tls-issue-pxc-0 ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-tls-issue-pxc-0 condition met E0517 02:15:41.890233 31290 reflector.go:227] "Failed to watch" err="Get 
\"https://35.202.69.227/api/v1/namespaces/tls-issue-cert-manager-1530/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dsome-name-tls-issue-pxc-0&resourceVersion=1778984141383711012&timeoutSeconds=376&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/some-name-tls-issue-pxc-0 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-tls-issue-pxc-1 480 + local pod=some-name-tls-issue-pxc-1 + local max_retry=480 + local ns= ++ grep -E '^(pxc|proxysql)$' ++ echo some-name-tls-issue-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/some-name-tls-issue-pxc-1 condition met waiting for pod/some-name-tls-issue-pxc-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-tls-issue-pxc-2 480 + local pod=some-name-tls-issue-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-tls-issue-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-tls-issue-pxc-2 condition met E0517 02:16:02.416281 672 reflector.go:227] "Failed to watch" err="Get \"https://35.202.69.227/api/v1/namespaces/tls-issue-cert-manager-1530/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dsome-name-tls-issue-pxc-2&resourceVersion=1778984160453295012&timeoutSeconds=340&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/some-name-tls-issue-pxc-2 to become Ready.Ok + wait_cluster_consistency some-name-tls-issue 3 2 + local cluster_name=some-name-tls-issue + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name-tls-issue to be ready' waiting for pxc/some-name-tls-issue to be ready++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.doNV6CKPCu +++ mktemp ++ local LAST_ERR=/tmp/tmp.8LWqMNe8xV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.doNV6CKPCu ++ cat /tmp/tmp.8LWqMNe8xV ++ rm /tmp/tmp.doNV6CKPCu /tmp/tmp.8LWqMNe8xV ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CEdflTQeNF +++ mktemp ++ local LAST_ERR=/tmp/tmp.fvqY2A9fB1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CEdflTQeNF ++ cat /tmp/tmp.fvqY2A9fB1 ++ rm /tmp/tmp.CEdflTQeNF /tmp/tmp.fvqY2A9fB1 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name-tls-issue +++ local cluster_name=some-name-tls-issue ++++ get_proxy some-name-tls-issue ++++ local target_cluster=some-name-tls-issue +++++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.WTyCqJw9Bk ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.BdikjCBQu4 +++++ local exit_status=0 
++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.WTyCqJw9Bk +++++ cat /tmp/tmp.BdikjCBQu4 +++++ rm /tmp/tmp.WTyCqJw9Bk /tmp/tmp.BdikjCBQu4 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-tls-issue-haproxy ++++ return +++ local cluster_proxy=some-name-tls-issue-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TgACtQTu27 +++ mktemp ++ local LAST_ERR=/tmp/tmp.rsDNkKPXzB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TgACtQTu27 ++ cat /tmp/tmp.rsDNkKPXzB ++ rm /tmp/tmp.TgACtQTu27 /tmp/tmp.rsDNkKPXzB ++ return 0 + [[ 2 == \2 ]] + echo + desc 'check ssl-internal certificate using PXC after CA rotation' + set +o xtrace ----------------------------------------------------------------------------------- check ssl-internal certificate using PXC after CA rotation ----------------------------------------------------------------------------------- + check_verify_identity some-name-tls-issue-pxc + local host=some-name-tls-issue-pxc + local command=exit + local 'args=--ssl-ca=/etc/mysql/ssl-internal/ca.crt --ssl-mode=VERIFY_IDENTITY --protocol=tcp -uroot -proot_password --host=some-name-tls-issue-pxc' + kubectl_bin exec some-name-tls-issue-pxc-0 -- bash -c 'printf '\''%s\n'\'' "exit" | mysql -sN --ssl-ca=/etc/mysql/ssl-internal/ca.crt --ssl-mode=VERIFY_IDENTITY --protocol=tcp -uroot -proot_password --host=some-name-tls-issue-pxc' ++ mktemp + local LAST_OUT=/tmp/tmp.zQMmY2bcYa ++ mktemp + local LAST_ERR=/tmp/tmp.Uexw6WTi7x + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec some-name-tls-issue-pxc-0 -- bash -c 'printf '\''%s\n'\'' "exit" | mysql -sN --ssl-ca=/etc/mysql/ssl-internal/ca.crt --ssl-mode=VERIFY_IDENTITY --protocol=tcp -uroot -proot_password --host=some-name-tls-issue-pxc' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.zQMmY2bcYa + cat /tmp/tmp.Uexw6WTi7x mysql: [Warning] Using a password on the command line interface can be insecure. 
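
This VERIFY_IDENTITY probe is the actual TLS assertion of the test: the mysql client is pointed at the in-pod CA bundle with --ssl-mode=VERIFY_IDENTITY, which validates both the certificate chain and that the server certificate's SAN matches --host, so the connection only succeeds if the leaf certificates presented by PXC (and by HAProxy, checked next) were re-issued under the rotated CA. A minimal standalone version of the probe, using the same pod, credentials, and CA path as the trace:

    # Succeeds only if the server cert chains to the given CA *and* its SAN
    # covers the requested host; root credentials are the test defaults.
    check_verify_identity() {
        local host=$1
        kubectl exec some-name-tls-issue-pxc-0 -- bash -c \
            "printf '%s\n' exit | mysql -sN \
                --ssl-ca=/etc/mysql/ssl-internal/ca.crt \
                --ssl-mode=VERIFY_IDENTITY --protocol=tcp \
                -uroot -proot_password --host=$host"
    }
    check_verify_identity some-name-tls-issue-pxc
    check_verify_identity some-name-tls-issue-haproxy
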
+ rm /tmp/tmp.zQMmY2bcYa /tmp/tmp.Uexw6WTi7x + return 0 + desc 'check ssl-internal certificate using HAProxy after CA rotation' + set +o xtrace ----------------------------------------------------------------------------------- check ssl-internal certificate using HAProxy after CA rotation ----------------------------------------------------------------------------------- + check_verify_identity some-name-tls-issue-haproxy + local host=some-name-tls-issue-haproxy + local command=exit + local 'args=--ssl-ca=/etc/mysql/ssl-internal/ca.crt --ssl-mode=VERIFY_IDENTITY --protocol=tcp -uroot -proot_password --host=some-name-tls-issue-haproxy' + kubectl_bin exec some-name-tls-issue-pxc-0 -- bash -c 'printf '\''%s\n'\'' "exit" | mysql -sN --ssl-ca=/etc/mysql/ssl-internal/ca.crt --ssl-mode=VERIFY_IDENTITY --protocol=tcp -uroot -proot_password --host=some-name-tls-issue-haproxy' ++ mktemp + local LAST_OUT=/tmp/tmp.uOHmKlMB4s ++ mktemp + local LAST_ERR=/tmp/tmp.3wgDBMHWvW + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec some-name-tls-issue-pxc-0 -- bash -c 'printf '\''%s\n'\'' "exit" | mysql -sN --ssl-ca=/etc/mysql/ssl-internal/ca.crt --ssl-mode=VERIFY_IDENTITY --protocol=tcp -uroot -proot_password --host=some-name-tls-issue-haproxy' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.uOHmKlMB4s + cat /tmp/tmp.3wgDBMHWvW mysql: [Warning] Using a password on the command line interface can be insecure. + rm /tmp/tmp.uOHmKlMB4s /tmp/tmp.3wgDBMHWvW + return 0 + destroy tls-issue-cert-manager-1530 + local namespace=tls-issue-cert-manager-1530 + local ignore_logs=true + [[ 0 == 1 ]] + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ + grep -v 'the object has been modified' + sort -u + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + tee /tmp/tmp.bIebaWzBVY/operator.log + grep -v 'get backup status: Job.batch' +++ grep -c percona-xtradb-cluster-operator +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator ++ jq -r '.items[] | select(.metadata.deletionTimestamp == null) | .metadata.name' ++ head -1 +++ mktemp ++ local LAST_OUT=/tmp/tmp.z2r8uCOdF3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.f2QpJNNA9f ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.z2r8uCOdF3 ++ cat /tmp/tmp.f2QpJNNA9f ++ rm /tmp/tmp.z2r8uCOdF3 /tmp/tmp.f2QpJNNA9f ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-55d95dc9d8-6njnz ++ mktemp + local LAST_OUT=/tmp/tmp.XQY6szL6RF ++ mktemp + local LAST_ERR=/tmp/tmp.fmIXDZntqN + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-55d95dc9d8-6njnz + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.XQY6szL6RF 
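
The destroy step drains the operator log through a de-noising pipeline before deleting anything, so failures can still be triaged from /tmp/tmp.bIebaWzBVY/operator.log afterward. A sketch of that capture, assuming the selectors and filters visible in the trace (the exact pipeline ordering inside the harness may differ):

    # Find the running operator pod by label, then dump its log with the
    # noise filters from the trace: info-level lines, optimistic-lock and
    # backup-status chatter dropped, timestamps stripped, duplicates folded.
    op_pod=$(kubectl get pods -n pxc-operator \
        --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator \
        --field-selector=status.phase=Running -o json |
        jq -r '.items[] | select(.metadata.deletionTimestamp == null) | .metadata.name' |
        head -1)
    kubectl logs -n pxc-operator "$op_pod" |
        grep -v level=info |
        grep -v 'the object has been modified' |
        grep -v 'get backup status: Job.batch' |
        sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' |
        sort -u | tee /tmp/operator.log
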
+ cat /tmp/tmp.fmIXDZntqN + rm /tmp/tmp.XQY6szL6RF /tmp/tmp.fmIXDZntqN + return 0 2026-05-17T02:02:17.941Z INFO setup Manager starting up {"gitCommit": "6d392bea35083cf1f9dca50d602c31a2f57502ed", "gitBranch": "PR-2473-6d392bea", "buildTime": "2026-05-16T22:17:40Z", "goVersion": "go1.26.3", "os": "linux", "arch": "amd64"} 2026-05-17T02:02:17.941Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.33.11-gke.1137000"} 2026-05-17T02:02:17.942Z INFO setup Feature gates {"PXCO_FEATURE_GATES": "", "enabled": ""} 2026-05-17T02:02:17.944Z INFO setup Registering Components. 2026-05-17T02:02:18.856Z INFO controller-runtime.metrics Starting metrics server 2026-05-17T02:02:18.856Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2026-05-17T02:02:18.856Z INFO setup Starting the Cmd. 2026-05-17T02:02:18.857Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"} 2026-05-17T02:02:18.857Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2026-05-17T02:02:18.857Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2026-05-17T02:02:18.857Z INFO controller-runtime.webhook Starting webhook server 2026-05-17T02:02:18.857Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2026-05-17T02:02:18.858Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"} 2026-05-17T02:02:18.957Z INFO Attempting to acquire leader lease... {"lock": "pxc-operator/08db1feb.percona.com"} 2026-05-17T02:02:18.982Z DEBUG events percona-xtradb-cluster-operator-55d95dc9d8-6njnz_5bae779f-10fb-40c4-a948-ee4f0aefaecf became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"1972f7eb-5ea8-4e3c-9b26-c26d6f08264a","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1778983338976751009"}, "reason": "LeaderElection"} 2026-05-17T02:02:18.982Z INFO Starting EventSource {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2026-05-17T02:02:18.982Z INFO Starting EventSource {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "source": "kind source: *v1.PerconaXtraDBCluster"} 2026-05-17T02:02:18.982Z INFO Starting EventSource {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "source": "kind source: *v1.Secret"} 2026-05-17T02:02:18.982Z INFO Starting EventSource {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2026-05-17T02:02:18.982Z INFO Successfully acquired lease {"lock": "pxc-operator/08db1feb.percona.com"} 2026-05-17T02:02:19.183Z INFO Starting Controller {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup"} 2026-05-17T02:02:19.183Z INFO Starting Controller {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster"} 2026-05-17T02:02:19.183Z INFO Starting 
Controller {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore"} 2026-05-17T02:02:19.183Z INFO Starting workers {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup", "worker count": 1} 2026-05-17T02:02:19.183Z INFO Starting workers {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "worker count": 1} 2026-05-17T02:02:19.183Z INFO Starting workers {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore", "worker count": 1} 2026-05-17T02:05:03.343Z INFO Set CR version {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-1530"}, "namespace": "tls-issue-cert-manager-1530", "name": "some-name-tls-issue", "reconcileID": "f9af8dac-808c-4196-810a-b8d33614dda4", "version": "1.20.0"} 2026-05-17T02:05:09.775Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-1530"}, "namespace": "tls-issue-cert-manager-1530", "name": "some-name-tls-issue", "reconcileID": "f9af8dac-808c-4196-810a-b8d33614dda4", "object": "auto-some-name-tls-issue-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2026-05-17T02:05:09.892Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-1530"}, "namespace": "tls-issue-cert-manager-1530", "name": "some-name-tls-issue", "reconcileID": "f9af8dac-808c-4196-810a-b8d33614dda4", "object": "some-name-tls-issue-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2026-05-17T02:05:09.936Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-1530"}, "namespace": "tls-issue-cert-manager-1530", "name": "some-name-tls-issue", "reconcileID": "f9af8dac-808c-4196-810a-b8d33614dda4", "object": "some-name-tls-issue-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2026-05-17T02:05:09.997Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-1530"}, "namespace": "tls-issue-cert-manager-1530", "name": "some-name-tls-issue", "reconcileID": "f9af8dac-808c-4196-810a-b8d33614dda4", "object": "some-name-tls-issue-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-05-17T02:05:10.069Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-1530"}, "namespace": "tls-issue-cert-manager-1530", "name": "some-name-tls-issue", "reconcileID": "f9af8dac-808c-4196-810a-b8d33614dda4", "object": "some-name-tls-issue-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-05-17T02:05:10.181Z DEBUG 
Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-1530"}, "namespace": "tls-issue-cert-manager-1530", "name": "some-name-tls-issue", "reconcileID": "f9af8dac-808c-4196-810a-b8d33614dda4", "object": "some-name-tls-issue-haproxy", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-05-17T02:05:10.466Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-1530"}, "namespace": "tls-issue-cert-manager-1530", "name": "some-name-tls-issue", "reconcileID": "f9af8dac-808c-4196-810a-b8d33614dda4", "object": "some-name-tls-issue-haproxy-replicas", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-05-17T02:05:11.517Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-1530"}, "namespace": "tls-issue-cert-manager-1530", "name": "some-name-tls-issue", "reconcileID": "e0d67bba-83f1-47ef-9868-a3113f7552e4", "object": "some-name-tls-issue-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2026-05-17T02:05:11.543Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-1530"}, "namespace": "tls-issue-cert-manager-1530", "name": "some-name-tls-issue", "reconcileID": "e0d67bba-83f1-47ef-9868-a3113f7552e4", "object": "some-name-tls-issue-haproxy", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2026-05-17T02:06:02.551Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-1530"}, "namespace": "tls-issue-cert-manager-1530", "name": "some-name-tls-issue", "reconcileID": "1d061a85-ad01-46cc-8e14-cc3bfc8d85f8", "user": "operator"} 2026-05-17T02:06:02.586Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-1530"}, "namespace": "tls-issue-cert-manager-1530", "name": "some-name-tls-issue", "reconcileID": "1d061a85-ad01-46cc-8e14-cc3bfc8d85f8", "user": "monitor"} 2026-05-17T02:06:02.637Z INFO User monitor: granted privileges {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-1530"}, "namespace": "tls-issue-cert-manager-1530", "name": "some-name-tls-issue", "reconcileID": "1d061a85-ad01-46cc-8e14-cc3bfc8d85f8"} 2026-05-17T02:06:02.672Z INFO monitor user privileges granted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-1530"}, "namespace": "tls-issue-cert-manager-1530", "name": "some-name-tls-issue", "reconcileID": 
"1d061a85-ad01-46cc-8e14-cc3bfc8d85f8"} 2026-05-17T02:06:02.704Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-1530"}, "namespace": "tls-issue-cert-manager-1530", "name": "some-name-tls-issue", "reconcileID": "1d061a85-ad01-46cc-8e14-cc3bfc8d85f8", "user": "xtrabackup"} 2026-05-17T02:06:02.750Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-1530"}, "namespace": "tls-issue-cert-manager-1530", "name": "some-name-tls-issue", "reconcileID": "1d061a85-ad01-46cc-8e14-cc3bfc8d85f8"} 2026-05-17T02:06:02.779Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-1530"}, "namespace": "tls-issue-cert-manager-1530", "name": "some-name-tls-issue", "reconcileID": "1d061a85-ad01-46cc-8e14-cc3bfc8d85f8", "user": "replication"} 2026-05-17T02:08:36.690Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-1530"}, "namespace": "tls-issue-cert-manager-1530", "name": "some-name-tls-issue", "reconcileID": "d6b3357b-8a89-4ae1-83d2-c44c0bc530f5", "user": "root"} 2026-05-17T02:08:36.782Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-1530"}, "namespace": "tls-issue-cert-manager-1530", "name": "some-name-tls-issue", "reconcileID": "d6b3357b-8a89-4ae1-83d2-c44c0bc530f5", "new version": "8.0.43-34.1"} 2026-05-17T02:09:13.552Z DEBUG events CA certificate rotated, re-issuing leaf TLS certificates {"type": "Normal", "object": {"kind":"PerconaXtraDBCluster","namespace":"tls-issue-cert-manager-1530","name":"some-name-tls-issue","uid":"362a32d6-6503-40ee-84b3-c7c3c2bdd4b4","apiVersion":"pxc.percona.com/v1","resourceVersion":"1778983753435247001"}, "reason": "CARotation"} 2026-05-17T02:09:13.552Z INFO CA certificate rotation detected, re-issuing leaf TLS certificates {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-1530"}, "namespace": "tls-issue-cert-manager-1530", "name": "some-name-tls-issue", "reconcileID": "a9ad50fd-6395-47c5-ae7f-585b15fabb75", "cluster": "some-name-tls-issue", "sslMismatch": true, "sslInternalMismatch": true} 2026-05-17T02:09:13.652Z INFO Triggering leaf certificate re-issuance {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-1530"}, "namespace": "tls-issue-cert-manager-1530", "name": "some-name-tls-issue", "reconcileID": "a9ad50fd-6395-47c5-ae7f-585b15fabb75", "certificate": "some-name-tls-issue-ssl"} 2026-05-17T02:09:13.681Z INFO Triggering 
2026-05-17T02:09:13.681Z INFO Triggering leaf certificate re-issuance {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-1530"}, "namespace": "tls-issue-cert-manager-1530", "name": "some-name-tls-issue", "reconcileID": "a9ad50fd-6395-47c5-ae7f-585b15fabb75", "certificate": "some-name-tls-issue-ssl-internal"}
2026-05-17T02:09:19.051Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-1530"}, "namespace": "tls-issue-cert-manager-1530", "name": "some-name-tls-issue", "reconcileID": "3fd44705-5830-4b88-99d5-9072e730f307", "object": "some-name-tls-issue-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2026-05-17T02:15:03.708Z INFO Results of scanning sequences {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-1530"}, "namespace": "tls-issue-cert-manager-1530", "name": "some-name-tls-issue", "reconcileID": "96aaf6ba-46fa-4a7a-b202-a9aa17f83d6c", "pod": "some-name-tls-issue-pxc-0", "maxSeq": 39}
2026-05-17T02:15:03.708Z INFO We are in full cluster crash, starting recovery {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-1530"}, "namespace": "tls-issue-cert-manager-1530", "name": "some-name-tls-issue", "reconcileID": "96aaf6ba-46fa-4a7a-b202-a9aa17f83d6c"}
2026-05-17T02:15:35.922Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-1530"}, "namespace": "tls-issue-cert-manager-1530", "name": "some-name-tls-issue", "reconcileID": "803d2c54-9024-470c-b31f-27e5aa5b1702", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 34.118.231.75:3306: connect: connection refused"}
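Note: the "full cluster crash" entries are the expected fallout of all pxc pods restarting at once, most likely from the rolling update the certificate change triggered. The operator scans each pod's Galera sequence number and restarts the cluster from the most advanced node (here maxSeq 39 on pod 0). As a sketch of where such a number is recorded, assuming the usual Galera state file (the operator may also rely on wsrep-recover output rather than this file alone):

    kubectl -n tls-issue-cert-manager-1530 exec some-name-tls-issue-pxc-0 -c pxc -- \
        cat /var/lib/mysql/grastate.dat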
"percona-xtradb-cluster-operator", +  ObservedGeneration: 0, -  ObservedGeneration: 1, -  Operation: "Update", -  Operation: "Update", -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUtdGxzLWlzc3VlIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0ZW1wbGF0ZSI6eyJtZXRhZGF0YSI6eyJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUtdGxzLWlzc3VlIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6InB4YyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiIxZTlkMTZkYjQxYjFlMjI4N2VhNTMzNWM2"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUtdGxzLWlzc3VlIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn19LCJ0ZW1wbGF0ZSI6eyJtZXRhZGF0YSI6eyJsYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUtdGxzLWlzc3VlIiwiYXBwLmt1YmVybmV0ZXMuaW8vbWFuYWdlZC1ieSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXItb3BlcmF0b3IiLCJhcHAua3ViZXJuZXRlcy5pby9uYW1lIjoicGVyY29uYS14dHJhZGItY2x1c3RlciIsImFwcC5rdWJlcm5ldGVzLmlvL3BhcnQtb2YiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIn0sImFubm90YXRpb25zIjp7Imt1YmVjdGwua3ViZXJuZXRlcy5pby9kZWZhdWx0LWNvbnRhaW5lciI6InB4YyIsInBlcmNvbmEuY29tL2NvbmZpZ3VyYXRpb24taGFzaCI6ImQ0MWQ4Y2Q5OGYwMGIyMDRlOTgwMDk5OGVjZjg0MjdlIiwicGVyY29uYS5jb20vc3NsLWhhc2giOiJlNDMzY2QwOTM1ZjE1ZDM5NjIwZDM5MWEx"..., +  PeriodSeconds: 0, -  PeriodSeconds: 10, +  PersistentVolumeClaimRetentionPolicy: nil, -  PersistentVolumeClaimRetentionPolicy: s"&StatefulSetPersistentVolumeClaimRetentionPolicy{WhenDeleted:Retain,WhenScaled:Retain,}", +  Phase: "", -  Phase: "Pending", +  PodManagementPolicy: "", -  PodManagementPolicy: "OrderedReady", +  Protocol: "", -  Protocol: "TCP", +  ReadyReplicas: 0, -  ReadyReplicas: 3, +  Replicas: 0, -  Replicas: 3, +  ResourceVersion: "", -  ResourceVersion: "1778983712942847017", +  RestartPolicy: "", -  RestartPolicy: "Always", -  RevisionHistoryLimit: &10, +  RevisionHistoryLimit: nil, +  SchedulerName: "", -  SchedulerName: "default-scheduler", -  Subresource: "status", +  TerminationMessagePath: "", -  TerminationMessagePath: "/dev/termination-log", +  TerminationMessagePolicy: "", -  TerminationMessagePolicy: "File", -  Time: s"2026-05-17 02:05:09 +0000 UTC", -  Time: s"2026-05-17 02:08:32 +0000 UTC", -  TopologySpreadConstraints: nil, +  TopologySpreadConstraints: []v1.TopologySpreadConstraint{}, +  UID: "", -  UID: "20309461-3a5d-472e-94ca-6343aa521627", +  UpdatedReplicas: 0, -  UpdatedReplicas: 3, +  UpdateRevision: "", -  UpdateRevision: "some-name-tls-issue-pxc-5b4c76b87b", -  VolumeMode: &"Filesystem", +  VolumeMode: nil,   }    },    }, 
   {    },    },    {    },    }, ""),    },    {    },    ... // 16 identical fields    ... // 16 identical fields    ... // 22 identical fields    ... // 2 identical fields    ... // 3 identical fields    ... // 3 identical fields    ... // 3 identical fields    ... // 4 identical fields    ... // 5 identical fields    ... // 6 identical fields    ... // 7 identical fields    ... // 9 identical fields    ... // 9 identical fields    AccessModes: nil,    ActiveDeadlineSeconds: nil,    Affinity: &{PodAntiAffinity: &{RequiredDuringSchedulingIgnoredDuringExecution: {{LabelSelector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name-tls-issue", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, TopologyKey: "kubernetes.io/hostname"}}}},    Annotations: map[string]string{    Args: {"mysqld"},    AutomountServiceAccountToken: nil,    AWSElasticBlockStore: nil,    AzureFile: nil,    Capacity: nil,    Conditions: nil,    ConfigMap: &v1.ConfigMapVolumeSource{    ContainerPort: 3306,    ContainerPort: 33060,    ContainerPort: 33062,    ContainerPort: 4444,    ContainerPort: 4567,    ContainerPort: 4568,    Containers: []v1.Container{    DataSource: nil,    DataSourceRef: nil,    DeletionGracePeriodSeconds: nil,    DeletionGracePeriodSeconds: nil,    DeletionTimestamp: nil,    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-tls-issue-env-vars-pxc"}, Optional: &true}}},    Env: {{Name: "PXC_SERVICE", Value: "some-name-tls-issue-pxc-unready"}, {Name: "MONITOR_HOST", Value: "%"}, {Name: "MYSQL_ROOT_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name-tls-issue"}, Key: "root"}}}, {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name-tls-issue"}, Key: "xtrabackup"}}}, ...},    EphemeralContainers: nil,    FailureThreshold: 3,    FC: nil,    Finalizers: nil,    Finalizers: nil,    GitRepo: nil,    HostAliases: nil,    HostIP: "",    HostPort: 0,    ImagePullPolicy: "Always",    InitContainers: []v1.Container{    InitialDelaySeconds: 300,    ISCSI: nil,    Items: nil,    Items: nil,    "kubectl.kubernetes.io/default-container": "pxc",    Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name-tls-issue", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...},    Labels: nil,    Lifecycle: nil,    LivenessProbe: &v1.Probe{    LocalObjectReference: {Name: "auto-some-name-tls-issue-pxc"},    LocalObjectReference: {Name: "some-name-tls-issue-pxc"},    ManagedFields: nil,    MinReadySeconds: 0,    Name: "auto-config",    {Name: "bin", VolumeSource: {EmptyDir: &{}}},    Name: "config",    Name: "ist",    Name: "mysql",    Name: "mysql-admin",    Name: "mysql-init-file",    Name: "mysql-users-secret-file",    Name: "mysqlx",    Namespace: "tls-issue-cert-manager-1530",    Name: "ssl",    Name: "ssl-internal",    Name: "sst",    {Name: "tmp", VolumeSource: {EmptyDir: &{}}},    Name: "vault-keyring-secret",    Name: "write-set",    NFS: nil,    NodeName: "",    NodeSelector: nil,    ObjectMeta: {Name: "datadir", Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name-tls-issue", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    ObjectMeta: v1.ObjectMeta{    ObjectMeta: v1.ObjectMeta{    
Optional: &false,    Optional: &true,    Optional: &true,    Ordinals: nil,    OS: nil,    Overhead: nil,    OwnerReferences: {{APIVersion: "pxc.percona.com/v1", Kind: "PerconaXtraDBCluster", Name: "some-name-tls-issue", UID: "362a32d6-6503-40ee-84b3-c7c3c2bdd4b4", ...}},    OwnerReferences: nil,    "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e",    "percona.com/ssl-hash": strings.Join({    "percona.com/ssl-internal-hash": strings.Join({    Ports: []v1.ContainerPort{    PreemptionPolicy: nil,    ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/liveness-check.sh"}}},    Quobyte: nil,    ReadinessProbe: &{ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/readiness-check.sh"}}}, InitialDelaySeconds: 15, TimeoutSeconds: 15, PeriodSeconds: 30, ...},    Replicas: &3,    SecretName: "internal-some-name-tls-issue",    SecretName: "some-name-tls-issue-mysql-init",    SecretName: "some-name-tls-issue-ssl",    SecretName: "some-name-tls-issue-ssl-internal",    SecretName: "some-name-vault",    Secret: &v1.SecretVolumeSource{    SecurityContext: nil,    Selector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name-tls-issue", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    SelfLink: "",    ServiceAccountName: "default",    ServiceName: "some-name-tls-issue-pxc",    SetHostnameAsFQDN: nil,    Spec: v1.PersistentVolumeClaimSpec{    Spec: v1.PodSpec{    Spec: v1.StatefulSetSpec{    StartupProbe: nil,    Status: v1.PersistentVolumeClaimStatus{    Status: v1.StatefulSetStatus{    StorageClassName: nil,    Subdomain: "",    SuccessThreshold: 1,    Template: v1.PodTemplateSpec{    TerminationGracePeriodSeconds: &600,    TerminationGracePeriodSeconds: nil,    TimeoutSeconds: 5,    Tolerations: nil,    TypeMeta: {},    TypeMeta: {Kind: "StatefulSet", APIVersion: "apps/v1"},    UpdateStrategy: {Type: "RollingUpdate", RollingUpdate: &{Partition: &0}},   &v1.StatefulSet{    VolumeAttributesClassName: nil,    VolumeClaimTemplates: []v1.PersistentVolumeClaim{    VolumeDevices: nil,    VolumeMounts: {{Name: "datadir", MountPath: "/var/lib/mysql"}, {Name: "config", MountPath: "/etc/percona-xtradb-cluster.conf.d"}, {Name: "tmp", MountPath: "/tmp"}, {Name: "ssl", MountPath: "/etc/mysql/ssl"}, ...},    VolumeName: "",    VolumeSource: v1.VolumeSource{    Volumes: []v1.Volume{    VsphereVolume: nil,    WorkingDir: "", + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n tls-issue-cert-manager-1530 some-name-tls-issue --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name-tls-issue patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.0o9SaNX3KO ++ mktemp + local LAST_ERR=/tmp/tmp.EbNU4Xl5KW + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.0o9SaNX3KO perconaxtradbcluster.pxc.percona.com "some-name-tls-issue" deleted from tls-issue-cert-manager-1530 namespace + cat /tmp/tmp.EbNU4Xl5KW + rm /tmp/tmp.0o9SaNX3KO /tmp/tmp.EbNU4Xl5KW + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.1y3fRzNSpp ++ mktemp + local LAST_ERR=/tmp/tmp.6nnDYuk7xc + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + 
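Note: the block above looks like the operator's debug diff of the live StatefulSet (the "-" side, with server-populated status and managedFields) against the object it regenerated (the "+" side, with those fields still zero-valued). The visible substantive change is in the percona.com/ssl-hash and percona.com/ssl-internal-hash values embedded in last-config-hash, which is consistent with the earlier "Updating object" entry reporting hashChanged: true and with the pods rolling. From here the teardown starts; finalizers are cleared first so that deleting the custom resources cannot hang waiting on an operator that is itself about to be removed. The pattern, copied from the trace:

    kubectl get pxc --all-namespaces -o wide | grep -v NAMESPACE \
        | xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'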
+ kubectl get pxc --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ kubectl patch pxc -n tls-issue-cert-manager-1530 some-name-tls-issue --type=merge -p '{"metadata":{"finalizers":[]}}'
perconaxtradbcluster.pxc.percona.com/some-name-tls-issue patched
+ kubectl_bin delete pxc --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.0o9SaNX3KO
++ mktemp
+ local LAST_ERR=/tmp/tmp.EbNU4Xl5KW
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.0o9SaNX3KO
perconaxtradbcluster.pxc.percona.com "some-name-tls-issue" deleted from tls-issue-cert-manager-1530 namespace
+ cat /tmp/tmp.EbNU4Xl5KW
+ rm /tmp/tmp.0o9SaNX3KO /tmp/tmp.EbNU4Xl5KW
+ return 0
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.1y3fRzNSpp
++ mktemp
+ local LAST_ERR=/tmp/tmp.6nnDYuk7xc
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.1y3fRzNSpp
No resources found
+ cat /tmp/tmp.6nnDYuk7xc
+ rm /tmp/tmp.1y3fRzNSpp /tmp/tmp.6nnDYuk7xc
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.1hDWIIu7ht
++ mktemp
+ local LAST_ERR=/tmp/tmp.Bl2B51u8Tp
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.1hDWIIu7ht
No resources found
+ cat /tmp/tmp.Bl2B51u8Tp
+ rm /tmp/tmp.1hDWIIu7ht /tmp/tmp.Bl2B51u8Tp
+ return 0
+ kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
++ mktemp
+ local LAST_OUT=/tmp/tmp.GxOiCYlFo0
++ mktemp
+ local LAST_ERR=/tmp/tmp.jlFqwcS1kK
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.GxOiCYlFo0
validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted
+ cat /tmp/tmp.jlFqwcS1kK
+ rm /tmp/tmp.GxOiCYlFo0 /tmp/tmp.jlFqwcS1kK
+ return 0
+ kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.19.2/cert-manager.yaml
namespace "cert-manager" deleted
customresourcedefinition.apiextensions.k8s.io "challenges.acme.cert-manager.io" deleted
customresourcedefinition.apiextensions.k8s.io "orders.acme.cert-manager.io" deleted
customresourcedefinition.apiextensions.k8s.io "certificaterequests.cert-manager.io" deleted
customresourcedefinition.apiextensions.k8s.io "certificates.cert-manager.io" deleted
customresourcedefinition.apiextensions.k8s.io "clusterissuers.cert-manager.io" deleted
customresourcedefinition.apiextensions.k8s.io "issuers.cert-manager.io" deleted
serviceaccount "cert-manager-cainjector" deleted from cert-manager namespace
serviceaccount "cert-manager" deleted from cert-manager namespace
serviceaccount "cert-manager-webhook" deleted from cert-manager namespace
clusterrole.rbac.authorization.k8s.io "cert-manager-cainjector" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-controller-issuers" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-controller-certificates" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-controller-orders" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-controller-challenges" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-cluster-view" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-view" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-edit" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" deleted
clusterrole.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" deleted
clusterrolebinding.rbac.authorization.k8s.io "cert-manager-cainjector" deleted
clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-issuers" deleted
clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-clusterissuers" deleted
clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-certificates" deleted
clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-orders" deleted
clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-challenges" deleted
clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-ingress-shim" deleted
clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-approve:cert-manager-io" deleted
clusterrolebinding.rbac.authorization.k8s.io "cert-manager-controller-certificatesigningrequests" deleted
clusterrolebinding.rbac.authorization.k8s.io "cert-manager-webhook:subjectaccessreviews" deleted
role.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" deleted from kube-system namespace
role.rbac.authorization.k8s.io "cert-manager:leaderelection" deleted from kube-system namespace
role.rbac.authorization.k8s.io "cert-manager-tokenrequest" deleted from cert-manager namespace
role.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" deleted from cert-manager namespace
rolebinding.rbac.authorization.k8s.io "cert-manager-cainjector:leaderelection" deleted from kube-system namespace
rolebinding.rbac.authorization.k8s.io "cert-manager:leaderelection" deleted from kube-system namespace
rolebinding.rbac.authorization.k8s.io "cert-manager-tokenrequest" deleted from cert-manager namespace
rolebinding.rbac.authorization.k8s.io "cert-manager-webhook:dynamic-serving" deleted from cert-manager namespace
service "cert-manager-cainjector" deleted from cert-manager namespace
service "cert-manager" deleted from cert-manager namespace
service "cert-manager-webhook" deleted from cert-manager namespace
deployment.apps "cert-manager-cainjector" deleted from cert-manager namespace
deployment.apps "cert-manager" deleted from cert-manager namespace
deployment.apps "cert-manager-webhook" deleted from cert-manager namespace
mutatingwebhookconfiguration.admissionregistration.k8s.io "cert-manager-webhook" deleted
validatingwebhookconfiguration.admissionregistration.k8s.io "cert-manager-webhook" deleted
+ '[' '!' -z '' ']'
+ '[' -n pxc-operator ']'
+ rm -rf /tmp/tmp.bIebaWzBVY
+ kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator
+ kubectl_bin delete --grace-period=0 --force=true namespace tls-issue-cert-manager-1530
++ mktemp
++ mktemp
+ desc 'test passed'
+ set +o xtrace
-----------------------------------------------------------------------------------
test passed
-----------------------------------------------------------------------------------
+ local LAST_OUT=/tmp/tmp.iZ3buhACT8
+ local LAST_OUT=/tmp/tmp.ZxRj9QCeDw
++ mktemp
++ mktemp
+ local LAST_ERR=/tmp/tmp.vUeuf43sd8
+ local exit_status=0
+ local LAST_ERR=/tmp/tmp.Jq47VhCeKr
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace pxc-operator
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace tls-issue-cert-manager-1530
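Note: every kubectl_bin call in this log expands to the same retry scaffold: capture stdout and stderr into mktemp files, try the kubectl command up to three times, print both captures, and propagate the exit status. A minimal reconstruction of the helper, assuming only what the trace shows (the real function in the repo's e2e-tests scripts may differ, for example by sleeping between attempts):

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            # stop retrying as soon as the command succeeds
            [ "$exit_status" != 0 ] || break
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR"
        rm "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }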