Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/logs/tls-issue-cert-manager-8-0.log Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1 + main + create_infra tls-issue-cert-manager-15716 + local ns=tls-issue-cert-manager-15716 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n tls-issue-cert-manager-7731 some-name-tls-issue --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name-tls-issue patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.Wqs9Fflnmw ++ mktemp + local LAST_ERR=/tmp/tmp.1tVrat4mhb + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Wqs9Fflnmw perconaxtradbcluster.pxc.percona.com "some-name-tls-issue" deleted from tls-issue-cert-manager-7731 namespace + cat /tmp/tmp.1tVrat4mhb + rm /tmp/tmp.Wqs9Fflnmw /tmp/tmp.1tVrat4mhb + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.88qJAq0HrX ++ mktemp + local LAST_ERR=/tmp/tmp.qepAg1fSZI + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.88qJAq0HrX No resources found + cat /tmp/tmp.qepAg1fSZI + rm /tmp/tmp.88qJAq0HrX /tmp/tmp.qepAg1fSZI + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.O6EAGS5k55 ++ mktemp + local LAST_ERR=/tmp/tmp.ksiQz4Py5u + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.O6EAGS5k55 No resources found + cat /tmp/tmp.ksiQz4Py5u + rm /tmp/tmp.O6EAGS5k55 /tmp/tmp.ksiQz4Py5u + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ grep chaos-mesh ++ kubectl api-resources ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ grep chaos-mesh 
++ awk '{print $1}' ++ kubectl get clusterrole + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator + xargs kubectl delete ns + kubectl_bin get ns ++ mktemp + local LAST_OUT=/tmp/tmp.tVZP9bweQI + grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.BQPaLeeQwc ++ mktemp + local LAST_ERR=/tmp/tmp.OSEdlRGvs5 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + local LAST_ERR=/tmp/tmp.u5QoA7PjNi + local exit_status=0 + awk '{print$1}' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.tVZP9bweQI + cat /tmp/tmp.u5QoA7PjNi + rm /tmp/tmp.tVZP9bweQI /tmp/tmp.u5QoA7PjNi + return 0 namespace "cert-manager" deleted namespace "tls-issue-cert-manager-7731" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BQPaLeeQwc namespace "pxc-operator" deleted + cat /tmp/tmp.OSEdlRGvs5 + rm /tmp/tmp.BQPaLeeQwc /tmp/tmp.OSEdlRGvs5 + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'waiting for namespace/pxc-operator to be deleted' waiting for namespace/pxc-operator to be deleted+ set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.mT2hlcaCF9 ++ mktemp + local LAST_ERR=/tmp/tmp.vikPfwgI4i + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.mT2hlcaCF9 namespace/pxc-operator created + cat /tmp/tmp.vikPfwgI4i + rm /tmp/tmp.mT2hlcaCF9 /tmp/tmp.vikPfwgI4i + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.qPBPseGIun +++ mktemp ++ local LAST_ERR=/tmp/tmp.fdEczgzpVr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qPBPseGIun ++ cat /tmp/tmp.fdEczgzpVr ++ rm /tmp/tmp.qPBPseGIun /tmp/tmp.fdEczgzpVr ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2399-dbfcca1d-3-cluster5 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.6SvXeA1v6L ++ mktemp + local LAST_ERR=/tmp/tmp.z9BGTTrDAw + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2399-dbfcca1d-3-cluster5 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.6SvXeA1v6L Context 
"gke_cloud-dev-112233_us-central1-a_jen-pxc-2399-dbfcca1d-3-cluster5" modified. + cat /tmp/tmp.z9BGTTrDAw + rm /tmp/tmp.6SvXeA1v6L /tmp/tmp.z9BGTTrDAw + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.Nardf2uybg ++ mktemp + local LAST_ERR=/tmp/tmp.bzXfycda91 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Nardf2uybg customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.bzXfycda91 + rm /tmp/tmp.Nardf2uybg /tmp/tmp.bzXfycda91 + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/deploy/cw-rbac.yaml + kubectl_bin apply -f - + sed -e 's^namespace: .*^namespace: pxc-operator^' ++ mktemp + local LAST_OUT=/tmp/tmp.19LpN2V63k ++ mktemp + local LAST_ERR=/tmp/tmp.atCedFZniV + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.19LpN2V63k clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.atCedFZniV + rm /tmp/tmp.19LpN2V63k /tmp/tmp.atCedFZniV + return 0 + kubectl_bin apply -f - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "PXCO_FEATURE_GATES").value) = ""' - + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2399-dbfcca1d^' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/deploy/cw-operator.yaml ++ mktemp + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - + local LAST_OUT=/tmp/tmp.j0TXeyuKSC + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - ++ mktemp + local LAST_ERR=/tmp/tmp.1s2ECBQBMI + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.j0TXeyuKSC deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.1s2ECBQBMI + rm /tmp/tmp.j0TXeyuKSC /tmp/tmp.1s2ECBQBMI + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.torGg2J6zR ++ mktemp + local LAST_ERR=/tmp/tmp.aEwrTLPmWj + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.torGg2J6zR pod/percona-xtradb-cluster-operator-c7445dd5-pmn7f condition met + cat /tmp/tmp.aEwrTLPmWj + rm /tmp/tmp.torGg2J6zR /tmp/tmp.aEwrTLPmWj + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ jq -r '.items[] | select(.metadata.deletionTimestamp == null) | .metadata.name' ++ head -1 ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.2iHmP3oikS +++ mktemp ++ local LAST_ERR=/tmp/tmp.5Ku2UJz5hm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2iHmP3oikS ++ cat /tmp/tmp.5Ku2UJz5hm ++ rm /tmp/tmp.2iHmP3oikS /tmp/tmp.5Ku2UJz5hm ++ return 0 + wait_pod percona-xtradb-cluster-operator-c7445dd5-pmn7f 480 pxc-operator + local pod=percona-xtradb-cluster-operator-c7445dd5-pmn7f + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-c7445dd5-pmn7f ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-c7445dd5-pmn7f condition met waiting for pod/percona-xtradb-cluster-operator-c7445dd5-pmn7f to become Ready.Ok + sleep 3 + create_namespace tls-issue-cert-manager-15716 + local namespace=tls-issue-cert-manager-15716 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// ++ tail -n1 + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ awk '{print $1}' ++ grep chaos-mesh.org + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) 
were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + '[' -n '' ']' + desc 'cleaned up old namespaces tls-issue-cert-manager-15716' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces tls-issue-cert-manager-15716 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace tls-issue-cert-manager-15716 + kubectl_bin get ns ++ mktemp + local LAST_OUT=/tmp/tmp.2A41eJ5LB2 ++ mktemp + local LAST_ERR=/tmp/tmp.uNy3hgLpEj + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + awk '{print$1}' + grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + xargs kubectl delete ns ++ mktemp + local LAST_OUT=/tmp/tmp.3Bf1tDEQys ++ mktemp + local LAST_ERR=/tmp/tmp.a2UO2gfDza + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace tls-issue-cert-manager-15716 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.2A41eJ5LB2 + cat /tmp/tmp.uNy3hgLpEj + rm /tmp/tmp.2A41eJ5LB2 /tmp/tmp.uNy3hgLpEj + return 0 error: resource(s) were provided, but no name was specified + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace tls-issue-cert-manager-15716 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace tls-issue-cert-manager-15716 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.3Bf1tDEQys + cat /tmp/tmp.a2UO2gfDza Error from server (NotFound): namespaces "tls-issue-cert-manager-15716" not found + rm /tmp/tmp.3Bf1tDEQys /tmp/tmp.a2UO2gfDza + return 1 + : + wait_for_delete namespace/tls-issue-cert-manager-15716 + local res=namespace/tls-issue-cert-manager-15716 + echo -n 'waiting for namespace/tls-issue-cert-manager-15716 to be deleted' waiting for namespace/tls-issue-cert-manager-15716 to be deleted+ set +o xtrace Error from server (NotFound): namespaces "tls-issue-cert-manager-15716" not found + desc 'create namespace tls-issue-cert-manager-15716' + set +o xtrace ----------------------------------------------------------------------------------- create namespace tls-issue-cert-manager-15716 ----------------------------------------------------------------------------------- + kubectl_bin create namespace tls-issue-cert-manager-15716 ++ mktemp + local LAST_OUT=/tmp/tmp.TzItoftnpH ++ mktemp + local LAST_ERR=/tmp/tmp.JxrflH3IKR + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace tls-issue-cert-manager-15716 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.TzItoftnpH namespace/tls-issue-cert-manager-15716 created + cat /tmp/tmp.JxrflH3IKR + rm /tmp/tmp.TzItoftnpH /tmp/tmp.JxrflH3IKR + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.D0oP6JgUIb +++ mktemp ++ local LAST_ERR=/tmp/tmp.X0XoQv5xrv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ 
exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.D0oP6JgUIb ++ cat /tmp/tmp.X0XoQv5xrv ++ rm /tmp/tmp.D0oP6JgUIb /tmp/tmp.X0XoQv5xrv ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2399-dbfcca1d-3-cluster5 --namespace=tls-issue-cert-manager-15716 ++ mktemp + local LAST_OUT=/tmp/tmp.ZgLIDkPIHx ++ mktemp + local LAST_ERR=/tmp/tmp.C5gcojAlzO + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2399-dbfcca1d-3-cluster5 --namespace=tls-issue-cert-manager-15716 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ZgLIDkPIHx Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2399-dbfcca1d-3-cluster5" modified. + cat /tmp/tmp.C5gcojAlzO + rm /tmp/tmp.ZgLIDkPIHx /tmp/tmp.C5gcojAlzO + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.mCgMGwUXSW ++ mktemp + local LAST_ERR=/tmp/tmp.Yk8POns1Kl + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.mCgMGwUXSW secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.Yk8POns1Kl + rm /tmp/tmp.mCgMGwUXSW /tmp/tmp.Yk8POns1Kl + return 0 + cluster=some-name-tls-issue + deploy_cert_manager + desc 'deploy cert manager' + set +o xtrace ----------------------------------------------------------------------------------- deploy cert manager ----------------------------------------------------------------------------------- + kubectl_bin create namespace cert-manager ++ mktemp + local LAST_OUT=/tmp/tmp.YbozlNzwAx ++ mktemp + local LAST_ERR=/tmp/tmp.g848gbHAjx + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace cert-manager + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.YbozlNzwAx namespace/cert-manager created + cat /tmp/tmp.g848gbHAjx + rm /tmp/tmp.YbozlNzwAx /tmp/tmp.g848gbHAjx + return 0 + kubectl_bin label namespace cert-manager certmanager.k8s.io/disable-validation=true ++ mktemp + local LAST_OUT=/tmp/tmp.16Nnd50ZZ6 ++ mktemp + local LAST_ERR=/tmp/tmp.Pbs2XVM7Tq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl label namespace cert-manager certmanager.k8s.io/disable-validation=true + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.16Nnd50ZZ6 namespace/cert-manager labeled + cat /tmp/tmp.Pbs2XVM7Tq + rm /tmp/tmp.16Nnd50ZZ6 /tmp/tmp.Pbs2XVM7Tq + return 0 + kubectl_bin apply -f https://github.com/jetstack/cert-manager/releases/download/v1.19.2/cert-manager.yaml --validate=false ++ mktemp + local LAST_OUT=/tmp/tmp.SqekiRgZP7 ++ mktemp + local LAST_ERR=/tmp/tmp.OpwLEdQBqv + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f 
https://github.com/jetstack/cert-manager/releases/download/v1.19.2/cert-manager.yaml --validate=false + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.SqekiRgZP7 namespace/cert-manager configured customresourcedefinition.apiextensions.k8s.io/challenges.acme.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/orders.acme.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/certificaterequests.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/certificates.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/clusterissuers.cert-manager.io unchanged customresourcedefinition.apiextensions.k8s.io/issuers.cert-manager.io unchanged serviceaccount/cert-manager-cainjector created serviceaccount/cert-manager created serviceaccount/cert-manager-webhook created clusterrole.rbac.authorization.k8s.io/cert-manager-cainjector unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-issuers unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificates unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-orders unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-challenges unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-cluster-view unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-view unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-edit unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests unchanged clusterrole.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-cainjector unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-issuers unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-clusterissuers unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificates unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-orders unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-challenges unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-ingress-shim unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-approve:cert-manager-io unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-controller-certificatesigningrequests unchanged clusterrolebinding.rbac.authorization.k8s.io/cert-manager-webhook:subjectaccessreviews unchanged role.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection unchanged role.rbac.authorization.k8s.io/cert-manager:leaderelection unchanged role.rbac.authorization.k8s.io/cert-manager-tokenrequest created role.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created rolebinding.rbac.authorization.k8s.io/cert-manager-cainjector:leaderelection unchanged rolebinding.rbac.authorization.k8s.io/cert-manager:leaderelection unchanged rolebinding.rbac.authorization.k8s.io/cert-manager-tokenrequest created rolebinding.rbac.authorization.k8s.io/cert-manager-webhook:dynamic-serving created service/cert-manager-cainjector created service/cert-manager created service/cert-manager-webhook 
created deployment.apps/cert-manager-cainjector created deployment.apps/cert-manager created deployment.apps/cert-manager-webhook created mutatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook configured validatingwebhookconfiguration.admissionregistration.k8s.io/cert-manager-webhook configured + cat /tmp/tmp.OpwLEdQBqv Warning: resource namespaces/cert-manager is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. + rm /tmp/tmp.SqekiRgZP7 /tmp/tmp.OpwLEdQBqv + return 0 + '[' '' == 4.10 ']' + sleep 70 + desc 'create pxc cluster' + set +o xtrace ----------------------------------------------------------------------------------- create pxc cluster ----------------------------------------------------------------------------------- + spinup_pxc some-name-tls-issue /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue.yml 3 10 /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/conf/secrets_without_tls.yml /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/conf/client.yml + local cluster=some-name-tls-issue + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/conf/secrets_without_tls.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/conf/secrets_without_tls.yml ++ mktemp + local LAST_OUT=/tmp/tmp.OZl7UMISBO ++ mktemp + local LAST_ERR=/tmp/tmp.33fRljAQNY + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/conf/secrets_without_tls.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.OZl7UMISBO secret/my-cluster-secrets created + cat /tmp/tmp.33fRljAQNY + rm /tmp/tmp.OZl7UMISBO /tmp/tmp.33fRljAQNY + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/conf/client.yml + local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/conf/client.yml + local pvc_name= + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/conf/client.yml '' + local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/conf/client.yml + kubectl_bin apply -f - + local pvc_name= + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/conf/client.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2399-dbfcca1d#' + local LAST_OUT=/tmp/tmp.aelyR35vya + /usr/bin/sed -e 's#image:.*-pmm$#image: 
perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' ++ mktemp + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.tls-issue-cert-manager-15716~ + /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #' + local LAST_ERR=/tmp/tmp.MUaUTRLucJ + local exit_status=0 + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.aelyR35vya deployment.apps/pxc-client created + cat /tmp/tmp.MUaUTRLucJ + rm /tmp/tmp.aelyR35vya /tmp/tmp.MUaUTRLucJ + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue.yml + local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue.yml + local pvc_name= + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue.yml '' + local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue.yml + local pvc_name= + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue.yml ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_OUT=/tmp/tmp.tTYYxeu8Su + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' ++ mktemp + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2399-dbfcca1d#' + local LAST_ERR=/tmp/tmp.xawnyUX3cY + local exit_status=0 ++ seq 0 2 + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.tls-issue-cert-manager-15716~ + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #' + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.tTYYxeu8Su perconaxtradbcluster.pxc.percona.com/some-name-tls-issue created + cat /tmp/tmp.xawnyUX3cY + rm /tmp/tmp.tTYYxeu8Su /tmp/tmp.xawnyUX3cY + return 0 + desc 'check if all 3 Pods 
started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name-tls-issue ++ local target_cluster=some-name-tls-issue +++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.CvNr6zTBuQ ++++ mktemp +++ local LAST_ERR=/tmp/tmp.lpFJ0y4ItJ +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.CvNr6zTBuQ +++ cat /tmp/tmp.lpFJ0y4ItJ +++ rm /tmp/tmp.CvNr6zTBuQ /tmp/tmp.lpFJ0y4ItJ +++ return 0 ++ [[ false == \t\r\u\e ]] +++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.ItCcKmBHZq ++++ mktemp +++ local LAST_ERR=/tmp/tmp.7iUlmgEINb +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.ItCcKmBHZq +++ cat /tmp/tmp.7iUlmgEINb +++ rm /tmp/tmp.ItCcKmBHZq /tmp/tmp.7iUlmgEINb +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-tls-issue-proxysql ++ return + local proxy=some-name-tls-issue-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n tls-issue-cert-manager-15716 ++ mktemp + local LAST_OUT=/tmp/tmp.5AAqh0PlrY ++ mktemp + local LAST_ERR=/tmp/tmp.S56ellqSgg + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n tls-issue-cert-manager-15716 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n tls-issue-cert-manager-15716 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n tls-issue-cert-manager-15716 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.5AAqh0PlrY + cat /tmp/tmp.S56ellqSgg error: no matching resources found + rm /tmp/tmp.5AAqh0PlrY /tmp/tmp.S56ellqSgg + return 1 + true + wait_for_running some-name-tls-issue-proxysql 1 + local name=some-name-tls-issue-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-tls-issue-proxysql-0 480 + local pod=some-name-tls-issue-proxysql-0 + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' ++ echo some-name-tls-issue-proxysql-0 + local container=proxysql + set +o xtrace Error from server 
(NotFound): pods "some-name-tls-issue-proxysql-0" not found waiting for pod/some-name-tls-issue-proxysql-0 to become Ready...........Ok + wait_for_running some-name-tls-issue-pxc 3 + local name=some-name-tls-issue-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-tls-issue-pxc-0 480 + local pod=some-name-tls-issue-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-tls-issue-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-tls-issue-pxc-0 condition met waiting for pod/some-name-tls-issue-pxc-0 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-tls-issue-pxc-1 480 + local pod=some-name-tls-issue-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-tls-issue-pxc-1 ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/some-name-tls-issue-pxc-1 condition met waiting for pod/some-name-tls-issue-pxc-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-tls-issue-pxc-2 480 + local pod=some-name-tls-issue-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-tls-issue-pxc-2 ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/some-name-tls-issue-pxc-2 condition met waiting for pod/some-name-tls-issue-pxc-2 to become Ready.Ok + sleep 10 ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.spec.secretsName}' + local secret_name=my-cluster-secrets ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4uYyz3nyPf +++ mktemp ++ local LAST_ERR=/tmp/tmp.KZ8W0MGycs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4uYyz3nyPf ++ cat /tmp/tmp.KZ8W0MGycs ++ rm /tmp/tmp.4uYyz3nyPf /tmp/tmp.KZ8W0MGycs ++ return 0 + local root_pass=root_password + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-tls-issue-proxysql -uroot -p'\''root_password'\'' -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-tls-issue-proxysql -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Y8MOs8Q2bG +++ mktemp ++ local LAST_ERR=/tmp/tmp.VzvHNv3Coi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 
0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Y8MOs8Q2bG ++ cat /tmp/tmp.VzvHNv3Coi ++ rm /tmp/tmp.Y8MOs8Q2bG /tmp/tmp.VzvHNv3Coi ++ return 0 + client_pod=pxc-client-64c657cf9f-4nb46 + wait_pod pxc-client-64c657cf9f-4nb46 + local pod=pxc-client-64c657cf9f-4nb46 + local max_retry=480 + local ns= ++ echo pxc-client-64c657cf9f-4nb46 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64c657cf9f-4nb46 condition met waiting for pod/pxc-client-64c657cf9f-4nb46 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-tls-issue-proxysql -uroot -p'\''root_password'\'' -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-tls-issue-proxysql -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kvuycNgexd +++ mktemp ++ local LAST_ERR=/tmp/tmp.a3o7eYgk46 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kvuycNgexd ++ cat /tmp/tmp.a3o7eYgk46 ++ rm /tmp/tmp.kvuycNgexd /tmp/tmp.a3o7eYgk46 ++ return 0 + client_pod=pxc-client-64c657cf9f-4nb46 + wait_pod pxc-client-64c657cf9f-4nb46 + local pod=pxc-client-64c657cf9f-4nb46 + local max_retry=480 + local ns= ++ grep -E '^(pxc|proxysql)$' ++ echo pxc-client-64c657cf9f-4nb46 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-64c657cf9f-4nb46 condition met waiting for pod/pxc-client-64c657cf9f-4nb46 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-tls-issue-pxc-0.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issue-pxc-0.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-tls-issue-pxc-0.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issue-pxc-0.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.j2Nbi2P6La +++ mktemp ++ local LAST_ERR=/tmp/tmp.3xDbTtJccW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.j2Nbi2P6La ++ cat /tmp/tmp.3xDbTtJccW ++ rm 
/tmp/tmp.j2Nbi2P6La /tmp/tmp.3xDbTtJccW ++ return 0 + client_pod=pxc-client-64c657cf9f-4nb46 + wait_pod pxc-client-64c657cf9f-4nb46 + local pod=pxc-client-64c657cf9f-4nb46 + local max_retry=480 + local ns= ++ echo pxc-client-64c657cf9f-4nb46 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64c657cf9f-4nb46 condition met waiting for pod/pxc-client-64c657cf9f-4nb46 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.JPhhnNyild/select-1.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.JPhhnNyild/select-1.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/select-1.sql /tmp/tmp.JPhhnNyild/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-tls-issue-pxc-1.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issue-pxc-1.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-tls-issue-pxc-1.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issue-pxc-1.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ED5NDrwgnC +++ mktemp ++ local LAST_ERR=/tmp/tmp.DxfVTIqzxA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ED5NDrwgnC ++ cat /tmp/tmp.DxfVTIqzxA ++ rm /tmp/tmp.ED5NDrwgnC /tmp/tmp.DxfVTIqzxA ++ return 0 + client_pod=pxc-client-64c657cf9f-4nb46 + wait_pod pxc-client-64c657cf9f-4nb46 + local pod=pxc-client-64c657cf9f-4nb46 + local max_retry=480 + local ns= ++ echo pxc-client-64c657cf9f-4nb46 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64c657cf9f-4nb46 condition met waiting for pod/pxc-client-64c657cf9f-4nb46 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.JPhhnNyild/select-1.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.JPhhnNyild/select-1.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/select-1.sql /tmp/tmp.JPhhnNyild/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-tls-issue-pxc-2.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issue-pxc-2.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-tls-issue-pxc-2.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-tls-issue-pxc-2.some-name-tls-issue-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kEfZGbGF71 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Mu33RwjlVI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kEfZGbGF71 ++ cat /tmp/tmp.Mu33RwjlVI ++ rm /tmp/tmp.kEfZGbGF71 /tmp/tmp.Mu33RwjlVI ++ return 0 + client_pod=pxc-client-64c657cf9f-4nb46 + wait_pod pxc-client-64c657cf9f-4nb46 + local pod=pxc-client-64c657cf9f-4nb46 + local max_retry=480 + local ns= ++ echo pxc-client-64c657cf9f-4nb46 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64c657cf9f-4nb46 condition met waiting for pod/pxc-client-64c657cf9f-4nb46 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.JPhhnNyild/select-1.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.JPhhnNyild/select-1.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/select-1.sql /tmp/tmp.JPhhnNyild/select-1.sql + is_keyring_plugin_in_use some-name-tls-issue + local cluster=some-name-tls-issue + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + kubectl exec some-name-tls-issue-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' + grep -E -o 'early-plugin-load=keyring_\w+.so' + return 1 + wait_cluster_consistency some-name-tls-issue 3 2 + local cluster_name=some-name-tls-issue + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name-tls-issue to be ready' waiting for pxc/some-name-tls-issue to be ready++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4QU5aWMVhk +++ mktemp ++ local LAST_ERR=/tmp/tmp.RavhtPpLhd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4QU5aWMVhk ++ cat /tmp/tmp.RavhtPpLhd ++ rm /tmp/tmp.4QU5aWMVhk /tmp/tmp.RavhtPpLhd ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dsj93awNiv +++ mktemp ++ local LAST_ERR=/tmp/tmp.ukuSYDyAER ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dsj93awNiv ++ cat /tmp/tmp.ukuSYDyAER ++ rm /tmp/tmp.dsj93awNiv /tmp/tmp.ukuSYDyAER ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name-tls-issue +++ local cluster_name=some-name-tls-issue ++++ get_proxy some-name-tls-issue ++++ local target_cluster=some-name-tls-issue +++++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.3UM4RBzzZi ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.on9a5u6CC9 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.3UM4RBzzZi +++++ cat /tmp/tmp.on9a5u6CC9 +++++ rm /tmp/tmp.3UM4RBzzZi /tmp/tmp.on9a5u6CC9 +++++ return 0 ++++ [[ false == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.A2OGTuyE5m ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.bGh4i8Po8i +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.A2OGTuyE5m +++++ cat /tmp/tmp.bGh4i8Po8i +++++ rm /tmp/tmp.A2OGTuyE5m /tmp/tmp.bGh4i8Po8i +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-tls-issue-proxysql ++++ return +++ local cluster_proxy=some-name-tls-issue-proxysql 
+++ echo proxysql ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ugXOCQWnJS +++ mktemp ++ local LAST_ERR=/tmp/tmp.nWbVQtmJ14 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ugXOCQWnJS ++ cat /tmp/tmp.nWbVQtmJ14 ++ rm /tmp/tmp.ugXOCQWnJS /tmp/tmp.nWbVQtmJ14 ++ return 0 + [[ 2 == \2 ]] + echo + desc 'check if certificates issued with certmanager' + set +o xtrace ----------------------------------------------------------------------------------- check if certificates issued with certmanager ----------------------------------------------------------------------------------- + tlsSecretsShouldExist some-name-tls-issue-ssl + local secretName=some-name-tls-issue-ssl + checkTLSSecret some-name-tls-issue-ssl ca.crt + local secretName=some-name-tls-issue-ssl + local dataKey=ca.crt ++ kubectl_bin get secrets/some-name-tls-issue-ssl -o json ++ jq '.data["ca.crt"]' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZuholsNZim +++ mktemp ++ local LAST_ERR=/tmp/tmp.xMiIwcIMRK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/some-name-tls-issue-ssl -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZuholsNZim ++ cat /tmp/tmp.xMiIwcIMRK ++ rm /tmp/tmp.ZuholsNZim /tmp/tmp.xMiIwcIMRK ++ return 0 + local 'secretData="LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURFakNDQWZxZ0F3SUJBZ0lVSDNhVkthNGdLc3B1K2IrZnE5cldHZDlraHc0d0RRWUpLb1pJaHZjTkFRRUwKQlFBd0lURWZNQjBHQTFVRUF4TVdjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjFaUzFqWVRBZUZ3MHlOakF6TVRZeApNek01TURsYUZ3MHlPREF4TWpNeU1UTTVNRGxhTUNFeEh6QWRCZ05WQkFNVEZuTnZiV1V0Ym1GdFpTMTBiSE10CmFYTnpkV1V0WTJFd2dnRWlNQTBHQ1NxR1NJYjNEUUVCQVFVQUE0SUJEd0F3Z2dFS0FvSUJBUURFZVV3M2dmYjMKYjQ3R3lCRm5ycCtZZjJzMnRjVWZpeHhvWHgyZGJwSDRLOXBvbUg5cytyaWpMeS9zbzg0U2hoQVQ5ZWtmZS95bgoyNndmekkvS3lvN254eGUxRGJPVU4wc04xQUdEQ2JqVW5YSlJSUmo3Z0Y0YW4xaTZBYlRXSlhpNzRUVTYrMHI3ClQ2ZDkxVFhzMjd2emVleTZLbDlwM29XMWhWd0JOUG84T296S2FQNFAwU3pSc3V2TnB6Q09zbUQ3NlN5TGdZUUEKY211aHFXNkhBd2pUYTZWWGp2dngxVDBCMWk2dVJSQ3pnbEZWdEFpa1UxRytjQWo5Szl3LzIwRlM0K1paU1RONApTUmN4S1ZsMVlqbXdIcnZUU05paWpKR2RCVmNORkhEcHVhL1dNQ2M5VG01bUtUWWduS3I3MngySUxZR3FQODdpCldEMFdLRW9OMTRhN0FnTUJBQUdqUWpCQU1BNEdBMVVkRHdFQi93UUVBd0lDcERBUEJnTlZIUk1CQWY4RUJUQUQKQVFIL01CMEdBMVVkRGdRV0JCU3JDbHRvWlphSVl0MlJVL0hYWDNKQ0plZlBLakFOQmdrcWhraUc5dzBCQVFzRgpBQU9DQVFFQVlISU9mL2pvdVNrTXk1SEhWTEROWmEzRnhyYk9Mb3dLQjNnZWZ1WU5yd0loaVBJdk54RjBrMHhWCndXdHJCTEtzQzM5cXJTTkNRQnVmNnZNMlIwaXdKYkRpd0thK20zT3R6cThOOGpHQVRwV3RiMFN5RFhSRUtsT2wKQWFpdmxFbldPUHNpcDlWaVJFQTNnOXpiK0dEOTVuZ1hhdlhETmh4ZlkyZ1ZSSWhDck1Ncm9aZ1dHUHpYVzVvWQpqelhubTBIQXJ5Mlh0Z0szL1YzRzZuTnhjVTcrRTg1RFcrNUhaYWJ5V2F2ekpLQ0N6NU1HcGRGV2xRMU4xQkFyCmtsZnp6RHJ4ZDJIbVNkaUdZb0IwRTdrNjhFenpzaHk3ajVBUDlhMmx1SzY2WkdhUU1QTmFCc29hRXJCQndXK2gKcEhMYTRPSUhZVEMvMUJVeVB2ZVZkTHFIQ2drM2N3PT0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo="' + '[' -z 
'"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURFakNDQWZxZ0F3SUJBZ0lVSDNhVkthNGdLc3B1K2IrZnE5cldHZDlraHc0d0RRWUpLb1pJaHZjTkFRRUwKQlFBd0lURWZNQjBHQTFVRUF4TVdjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjFaUzFqWVRBZUZ3MHlOakF6TVRZeApNek01TURsYUZ3MHlPREF4TWpNeU1UTTVNRGxhTUNFeEh6QWRCZ05WQkFNVEZuTnZiV1V0Ym1GdFpTMTBiSE10CmFYTnpkV1V0WTJFd2dnRWlNQTBHQ1NxR1NJYjNEUUVCQVFVQUE0SUJEd0F3Z2dFS0FvSUJBUURFZVV3M2dmYjMKYjQ3R3lCRm5ycCtZZjJzMnRjVWZpeHhvWHgyZGJwSDRLOXBvbUg5cytyaWpMeS9zbzg0U2hoQVQ5ZWtmZS95bgoyNndmekkvS3lvN254eGUxRGJPVU4wc04xQUdEQ2JqVW5YSlJSUmo3Z0Y0YW4xaTZBYlRXSlhpNzRUVTYrMHI3ClQ2ZDkxVFhzMjd2emVleTZLbDlwM29XMWhWd0JOUG84T296S2FQNFAwU3pSc3V2TnB6Q09zbUQ3NlN5TGdZUUEKY211aHFXNkhBd2pUYTZWWGp2dngxVDBCMWk2dVJSQ3pnbEZWdEFpa1UxRytjQWo5Szl3LzIwRlM0K1paU1RONApTUmN4S1ZsMVlqbXdIcnZUU05paWpKR2RCVmNORkhEcHVhL1dNQ2M5VG01bUtUWWduS3I3MngySUxZR3FQODdpCldEMFdLRW9OMTRhN0FnTUJBQUdqUWpCQU1BNEdBMVVkRHdFQi93UUVBd0lDcERBUEJnTlZIUk1CQWY4RUJUQUQKQVFIL01CMEdBMVVkRGdRV0JCU3JDbHRvWlphSVl0MlJVL0hYWDNKQ0plZlBLakFOQmdrcWhraUc5dzBCQVFzRgpBQU9DQVFFQVlISU9mL2pvdVNrTXk1SEhWTEROWmEzRnhyYk9Mb3dLQjNnZWZ1WU5yd0loaVBJdk54RjBrMHhWCndXdHJCTEtzQzM5cXJTTkNRQnVmNnZNMlIwaXdKYkRpd0thK20zT3R6cThOOGpHQVRwV3RiMFN5RFhSRUtsT2wKQWFpdmxFbldPUHNpcDlWaVJFQTNnOXpiK0dEOTVuZ1hhdlhETmh4ZlkyZ1ZSSWhDck1Ncm9aZ1dHUHpYVzVvWQpqelhubTBIQXJ5Mlh0Z0szL1YzRzZuTnhjVTcrRTg1RFcrNUhaYWJ5V2F2ekpLQ0N6NU1HcGRGV2xRMU4xQkFyCmtsZnp6RHJ4ZDJIbVNkaUdZb0IwRTdrNjhFenpzaHk3ajVBUDlhMmx1SzY2WkdhUU1QTmFCc29hRXJCQndXK2gKcEhMYTRPSUhZVEMvMUJVeVB2ZVZkTHFIQ2drM2N3PT0KLS0tLS1FTkQgQ0VSVElGSUNBVEUtLS0tLQo="' ']' + checkTLSSecret some-name-tls-issue-ssl tls.crt + local secretName=some-name-tls-issue-ssl + local dataKey=tls.crt ++ kubectl_bin get secrets/some-name-tls-issue-ssl -o json ++ jq '.data["tls.crt"]' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rQcRQiIg0U +++ mktemp ++ local LAST_ERR=/tmp/tmp.Gwlw9Z0lmT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/some-name-tls-issue-ssl -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rQcRQiIg0U ++ cat /tmp/tmp.Gwlw9Z0lmT ++ rm /tmp/tmp.rQcRQiIg0U /tmp/tmp.Gwlw9Z0lmT ++ return 0 + local 
'secretData="LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURvVENDQW9tZ0F3SUJBZ0lVSWsyMktJZWhqQ05yUklHaWZsTTk2RjlVNWJVd0RRWUpLb1pJaHZjTkFRRUwKQlFBd0lURWZNQjBHQTFVRUF4TVdjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjFaUzFqWVRBZUZ3MHlOakF6TVRZeApNek01TVRKYUZ3MHlOakExTURNeU1UTTVNVEphTUNjeEpUQWpCZ05WQkFNVEhITnZiV1V0Ym1GdFpTMTBiSE10CmFYTnpkV1V0Y0hKdmVIbHpjV3d3Z2dFaU1BMEdDU3FHU0liM0RRRUJBUVVBQTRJQkR3QXdnZ0VLQW9JQkFRQ3cKVFFaQlVlc1RrQytQSGRQd25JakJBSFlxT1NJTGtLUmpyaEVQZlF4T0lneUh6b2hxQ3VhM0hmK2VZaGpGejJEZQpQbDZnRlBZNGNWRWVuNWc5NFhyWmp4YkJWcHF2VHc0K0RaZ3NBcXB1TUpDcVlqbFJtbDBMN3lTa3lMMjBadVRECkRDa0tCclVITEV4ZGZwOGE5bzQ4aGZGcEs2aDIvM3FvaHhuMDdiZitHNFVhNmt0dG96Rm9obHV6UVZ0ak5FQ1IKTkxJMUlkdlAvcTRsaURmS0dIYm54QnpHZXlNUDVMeWJ3eGp0eTByYkZxWTNRVVhkQU5rTXFBbnFkbXZWRFY1NgozL1Y5WjRyODRqMFF5Kzc2OFpxaGJxUzJ0NWJKLzFXSGFRMzhFaGE1QnlLaEdlOExoak5pOW56SGdDanlMV1ZUCm53dHF5TVA1QXQ4L3dYNjRRUFlaQWdNQkFBR2pnY293Z2Njd0RnWURWUjBQQVFIL0JBUURBZ1dnTUF3R0ExVWQKRXdFQi93UUNNQUF3SHdZRFZSMGpCQmd3Rm9BVXF3cGJhR1dXaUdMZGtWUHgxMTl5UWlYbnp5b3dnWVVHQTFVZApFUVIrTUh5Q0YzTnZiV1V0Ym1GdFpTMTBiSE10YVhOemRXVXRjSGhqZ2h4emIyMWxMVzVoYldVdGRHeHpMV2x6CmMzVmxMWEJ5YjNoNWMzRnNnaGtxTG5OdmJXVXRibUZ0WlMxMGJITXRhWE56ZFdVdGNIaGpnaDRxTG5OdmJXVXQKYm1GdFpTMTBiSE10YVhOemRXVXRjSEp2ZUhsemNXeUNDSFJsYzNRdVkyOXRNQTBHQ1NxR1NJYjNEUUVCQ3dVQQpBNElCQVFDOW1oY0hmR3hSbDlaa0FEMExnM21tUGJkSDFwc2FoeEZOa0VtQU5GdlVZd2VTWnRUSlB4Rzc1ekx3CnV2elRucnV6SmRWYm1tblBsRitJM2N4ZENYZllQbk5IWlNOcTMrcnBGTU9DL0tJeW1ReS9FTHdVT0ZXZytiTXAKRnZxRkFiL1ZtSTJqYkN0WHRvdFNLb0EvOE1KNkxmbWZKWU1KWEFCRGx5ckxLUUJ0R1RNcXcrRENoY0p2dEVJQQowWFY4UzdlWVZ2MTdwTWxSSFoxdENGREM2UVRtRzNrSzN4b2I5K21QOEZWdkcrZ0ZDZXkyWDBNazgwRC9uaDljCk5SdHdtVHUyTlg2RmtFazl5cUFPZnQ0cmdVSmVNZVFSajRmOUJIVVlJUmhrZVY4UW84cnlZd3RhRS94emMvSkYKTmVXdWsrVmN4VEI1QmxaclJ6QjFTMm9mSDZ5aAotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg=="' + '[' -z '"LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSURvVENDQW9tZ0F3SUJBZ0lVSWsyMktJZWhqQ05yUklHaWZsTTk2RjlVNWJVd0RRWUpLb1pJaHZjTkFRRUwKQlFBd0lURWZNQjBHQTFVRUF4TVdjMjl0WlMxdVlXMWxMWFJzY3kxcGMzTjFaUzFqWVRBZUZ3MHlOakF6TVRZeApNek01TVRKYUZ3MHlOakExTURNeU1UTTVNVEphTUNjeEpUQWpCZ05WQkFNVEhITnZiV1V0Ym1GdFpTMTBiSE10CmFYTnpkV1V0Y0hKdmVIbHpjV3d3Z2dFaU1BMEdDU3FHU0liM0RRRUJBUVVBQTRJQkR3QXdnZ0VLQW9JQkFRQ3cKVFFaQlVlc1RrQytQSGRQd25JakJBSFlxT1NJTGtLUmpyaEVQZlF4T0lneUh6b2hxQ3VhM0hmK2VZaGpGejJEZQpQbDZnRlBZNGNWRWVuNWc5NFhyWmp4YkJWcHF2VHc0K0RaZ3NBcXB1TUpDcVlqbFJtbDBMN3lTa3lMMjBadVRECkRDa0tCclVITEV4ZGZwOGE5bzQ4aGZGcEs2aDIvM3FvaHhuMDdiZitHNFVhNmt0dG96Rm9obHV6UVZ0ak5FQ1IKTkxJMUlkdlAvcTRsaURmS0dIYm54QnpHZXlNUDVMeWJ3eGp0eTByYkZxWTNRVVhkQU5rTXFBbnFkbXZWRFY1NgozL1Y5WjRyODRqMFF5Kzc2OFpxaGJxUzJ0NWJKLzFXSGFRMzhFaGE1QnlLaEdlOExoak5pOW56SGdDanlMV1ZUCm53dHF5TVA1QXQ4L3dYNjRRUFlaQWdNQkFBR2pnY293Z2Njd0RnWURWUjBQQVFIL0JBUURBZ1dnTUF3R0ExVWQKRXdFQi93UUNNQUF3SHdZRFZSMGpCQmd3Rm9BVXF3cGJhR1dXaUdMZGtWUHgxMTl5UWlYbnp5b3dnWVVHQTFVZApFUVIrTUh5Q0YzTnZiV1V0Ym1GdFpTMTBiSE10YVhOemRXVXRjSGhqZ2h4emIyMWxMVzVoYldVdGRHeHpMV2x6CmMzVmxMWEJ5YjNoNWMzRnNnaGtxTG5OdmJXVXRibUZ0WlMxMGJITXRhWE56ZFdVdGNIaGpnaDRxTG5OdmJXVXQKYm1GdFpTMTBiSE10YVhOemRXVXRjSEp2ZUhsemNXeUNDSFJsYzNRdVkyOXRNQTBHQ1NxR1NJYjNEUUVCQ3dVQQpBNElCQVFDOW1oY0hmR3hSbDlaa0FEMExnM21tUGJkSDFwc2FoeEZOa0VtQU5GdlVZd2VTWnRUSlB4Rzc1ekx3CnV2elRucnV6SmRWYm1tblBsRitJM2N4ZENYZllQbk5IWlNOcTMrcnBGTU9DL0tJeW1ReS9FTHdVT0ZXZytiTXAKRnZxRkFiL1ZtSTJqYkN0WHRvdFNLb0EvOE1KNkxmbWZKWU1KWEFCRGx5ckxLUUJ0R1RNcXcrRENoY0p2dEVJQQowWFY4UzdlWVZ2MTdwTWxSSFoxdENGREM2UVRtRzNrSzN4b2I5K21QOEZWdkcrZ0ZDZXkyWDBNazgwRC9uaDljCk5SdHdtVHUyTlg2RmtFazl5cUFPZnQ0cmdVSmVNZVFSajRmOUJIVVlJUmhrZVY4UW84cnlZd3RhRS94emMvSkYKTmVXdWsrVmN4VEI1QmxaclJ6QjFTMm9mSDZ5aAotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg=="' ']' 
+ checkTLSSecret some-name-tls-issue-ssl tls.key + local secretName=some-name-tls-issue-ssl + local dataKey=tls.key ++ kubectl_bin get secrets/some-name-tls-issue-ssl -o json +++ mktemp ++ jq '.data["tls.key"]' ++ local LAST_OUT=/tmp/tmp.ptDwhlbV9n +++ mktemp ++ local LAST_ERR=/tmp/tmp.KtxksQpYRk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/some-name-tls-issue-ssl -o json ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ptDwhlbV9n ++ cat /tmp/tmp.KtxksQpYRk ++ rm /tmp/tmp.ptDwhlbV9n /tmp/tmp.KtxksQpYRk ++ return 0 + local 'secretData="LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlFb3dJQkFBS0NBUUVBc0UwR1FWSHJFNUF2angzVDhKeUl3UUIyS2praUM1Q2tZNjRSRDMwTVRpSU1oODZJCmFncm10eDMvbm1JWXhjOWczajVlb0JUMk9IRlJIcCtZUGVGNjJZOFd3VmFhcjA4T1BnMllMQUtxYmpDUXFtSTUKVVpwZEMrOGtwTWk5dEdia3d3d3BDZ2ExQnl4TVhYNmZHdmFPUElYeGFTdW9kdjk2cUljWjlPMjMvaHVGR3VwTApiYU14YUlaYnMwRmJZelJBa1RTeU5TSGJ6LzZ1SllnM3loaDI1OFFjeG5zakQrUzhtOE1ZN2N0SzJ4YW1OMEZGCjNRRFpES2dKNm5acjFRMWVldC8xZldlSy9PSTlFTXZ1K3ZHYW9XNmt0cmVXeWY5Vmgya04vQklXdVFjaW9SbnYKQzRZell2Wjh4NEFvOGkxbFU1OExhc2pEK1FMZlA4Rit1RUQyR1FJREFRQUJBb0lCQUFSTTNuSVVVeDArUkl4OApXQnFIWEVLVWQ3OHNrNVd1TDltbkNBOUhOaDlMR2Z5eTdZV08za3AzbWt4MTdNSGdONS9pWGN2WFhpbXRMSlRQCmE0aXRHQlFpcU02bms4ekdPVzdTUldhQUZVR3plVldsdHBZMG8xeVp0d2MyWXl6Yy9FN1JvM0tmNyt4M2JFSlIKVlNGS0E5a01obk5EZnBNODcyWDh1aFFpVjlKMkdRQllhVVZ1TDVvcldCbkdOU3kxRTlrMHJiall2eEcwSFcvQQpPbFpyZXp1TzZBZjVZV0VwTmowYUorWWZ5eEVlRW1sMmVXT2lmNUlkTlN2VUhNa3habEg4RVBsSHc1RFU4a3I0CjhHSUZkbzdYWGpuOWRScmtmZVBhc1R0VUI1amdxRWlvMTF6Z0xlZDdGcUlUTW1BN3R5SkhzZWw5aDVRb1MrNHEKUUU0Z3FJMENnWUVBNFQ5bWJCWllMd1dDZ2FRQzhRemRnMHRreE1XLzhoMHFWdktqYm90eWErWTRvVVZyQy9qbAp2REF4cHJEQkJXd2xzK0s3djZCKytwWS9kN2JpdENEbk9kRW1aQWJsL0lKT1NJWGNDTmZ2L1liY04xY0Q2MUQ0CmM4SmxVSTlLaTlXQ2dRUEpyWG1FYkNhS1dzYkttOEVKdVJJa2pPN0R1cnRmaWI5YkZ4YnFKbU1DZ1lFQXlGN2sKWk8xTEJacWNrWldGSjNJdWI2NklsZktYSmRLMFRwdXFDTk05QnFoeVZaWTVtWmVXSWNUWkRHbXVTRFNwZElOawpHSHY2YURzc05Fd3RFc0pmUktwN1FzYkRRaDUzVnkvZDNzdFRPdkNYSWJwdUM0biszN2lrd3F1NlVOd3VqZS9wCkI4MndqSTJRUVAzRHphbE42UGdXcUZ2NU1TOUgwdklGTE5VRXJGTUNnWUFndDJPNkNWalBvNTQ4RkVWMUc2aEUKL0x0ck5Vblc4ZE9IZzRDbmVzdW96bXlsUi83dll6eURKQURpTkVRazlIdFpjSEpTdUE0WmVaOUlWU0VDaHREdQovZWowbUdmcFVFZnNrcnpOOEFka3JLVHB5TkxCNkNYZU1uc2l3MTFSaURhSFpMUnpaUFlOKzE4WEprNmhadDF4CnF5Zng3SEF6ZkNtNVNUa0Mxb28xL3dLQmdFRVNzRkltem9xUFgvSFlBVnFDTnBxR3NFMGliL3dVcGdJeGc5TkIKNXp2V1VPdUMwa29UZE9xYXAzTGtqM2xid1B6Wi94UUVqNXB5UFRUdTJHU1RRNlZsU0lxelNvNmFubDl1YUR3SApCSXpMdmdseUpuTVE2ZmwyV2V2dklaOUpJc1Z0K054VDdOWjQwelNJTmFNQkxSQlFJZXVqa3hacUtJVlZuR0NtCi9kR0pBb0dCQUxyZHd4UkpTK2d5L2xwM1VSUFdkaTBVY1huRDFxNWt4dkxzb1pXQmNWSEVUamN2eGNFbGpUeSsKY21ydUJwMUYyUkJmdlFGTWtTUUdhakd4KzVJQWZuZm9FTUJ1blZWVHBLNE5HNldmS0FGTXdYS1JleGVSSE1kOAo0cXJLcEg5YStTa2ZFRmo3eGRBOGt5K2kxYmN5OXBLN2E2Wlg5QVlHTlI5OHBjUmxxSTNICi0tLS0tRU5EIFJTQSBQUklWQVRFIEtFWS0tLS0tCg=="' + '[' -z 
'"LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlFb3dJQkFBS0NBUUVBc0UwR1FWSHJFNUF2angzVDhKeUl3UUIyS2praUM1Q2tZNjRSRDMwTVRpSU1oODZJCmFncm10eDMvbm1JWXhjOWczajVlb0JUMk9IRlJIcCtZUGVGNjJZOFd3VmFhcjA4T1BnMllMQUtxYmpDUXFtSTUKVVpwZEMrOGtwTWk5dEdia3d3d3BDZ2ExQnl4TVhYNmZHdmFPUElYeGFTdW9kdjk2cUljWjlPMjMvaHVGR3VwTApiYU14YUlaYnMwRmJZelJBa1RTeU5TSGJ6LzZ1SllnM3loaDI1OFFjeG5zakQrUzhtOE1ZN2N0SzJ4YW1OMEZGCjNRRFpES2dKNm5acjFRMWVldC8xZldlSy9PSTlFTXZ1K3ZHYW9XNmt0cmVXeWY5Vmgya04vQklXdVFjaW9SbnYKQzRZell2Wjh4NEFvOGkxbFU1OExhc2pEK1FMZlA4Rit1RUQyR1FJREFRQUJBb0lCQUFSTTNuSVVVeDArUkl4OApXQnFIWEVLVWQ3OHNrNVd1TDltbkNBOUhOaDlMR2Z5eTdZV08za3AzbWt4MTdNSGdONS9pWGN2WFhpbXRMSlRQCmE0aXRHQlFpcU02bms4ekdPVzdTUldhQUZVR3plVldsdHBZMG8xeVp0d2MyWXl6Yy9FN1JvM0tmNyt4M2JFSlIKVlNGS0E5a01obk5EZnBNODcyWDh1aFFpVjlKMkdRQllhVVZ1TDVvcldCbkdOU3kxRTlrMHJiall2eEcwSFcvQQpPbFpyZXp1TzZBZjVZV0VwTmowYUorWWZ5eEVlRW1sMmVXT2lmNUlkTlN2VUhNa3habEg4RVBsSHc1RFU4a3I0CjhHSUZkbzdYWGpuOWRScmtmZVBhc1R0VUI1amdxRWlvMTF6Z0xlZDdGcUlUTW1BN3R5SkhzZWw5aDVRb1MrNHEKUUU0Z3FJMENnWUVBNFQ5bWJCWllMd1dDZ2FRQzhRemRnMHRreE1XLzhoMHFWdktqYm90eWErWTRvVVZyQy9qbAp2REF4cHJEQkJXd2xzK0s3djZCKytwWS9kN2JpdENEbk9kRW1aQWJsL0lKT1NJWGNDTmZ2L1liY04xY0Q2MUQ0CmM4SmxVSTlLaTlXQ2dRUEpyWG1FYkNhS1dzYkttOEVKdVJJa2pPN0R1cnRmaWI5YkZ4YnFKbU1DZ1lFQXlGN2sKWk8xTEJacWNrWldGSjNJdWI2NklsZktYSmRLMFRwdXFDTk05QnFoeVZaWTVtWmVXSWNUWkRHbXVTRFNwZElOawpHSHY2YURzc05Fd3RFc0pmUktwN1FzYkRRaDUzVnkvZDNzdFRPdkNYSWJwdUM0biszN2lrd3F1NlVOd3VqZS9wCkI4MndqSTJRUVAzRHphbE42UGdXcUZ2NU1TOUgwdklGTE5VRXJGTUNnWUFndDJPNkNWalBvNTQ4RkVWMUc2aEUKL0x0ck5Vblc4ZE9IZzRDbmVzdW96bXlsUi83dll6eURKQURpTkVRazlIdFpjSEpTdUE0WmVaOUlWU0VDaHREdQovZWowbUdmcFVFZnNrcnpOOEFka3JLVHB5TkxCNkNYZU1uc2l3MTFSaURhSFpMUnpaUFlOKzE4WEprNmhadDF4CnF5Zng3SEF6ZkNtNVNUa0Mxb28xL3dLQmdFRVNzRkltem9xUFgvSFlBVnFDTnBxR3NFMGliL3dVcGdJeGc5TkIKNXp2V1VPdUMwa29UZE9xYXAzTGtqM2xid1B6Wi94UUVqNXB5UFRUdTJHU1RRNlZsU0lxelNvNmFubDl1YUR3SApCSXpMdmdseUpuTVE2ZmwyV2V2dklaOUpJc1Z0K054VDdOWjQwelNJTmFNQkxSQlFJZXVqa3hacUtJVlZuR0NtCi9kR0pBb0dCQUxyZHd4UkpTK2d5L2xwM1VSUFdkaTBVY1huRDFxNWt4dkxzb1pXQmNWSEVUamN2eGNFbGpUeSsKY21ydUJwMUYyUkJmdlFGTWtTUUdhakd4KzVJQWZuZm9FTUJ1blZWVHBLNE5HNldmS0FGTXdYS1JleGVSSE1kOAo0cXJLcEg5YStTa2ZFRmo3eGRBOGt5K2kxYmN5OXBLN2E2Wlg5QVlHTlI5OHBjUmxxSTNICi0tLS0tRU5EIFJTQSBQUklWQVRFIEtFWS0tLS0tCg=="' ']' + desc 'check if CA issuer created' + set +o xtrace ----------------------------------------------------------------------------------- check if CA issuer created ----------------------------------------------------------------------------------- + compare_kubectl issuer/some-name-tls-issue-pxc-ca-issuer + local resource=issuer/some-name-tls-issue-pxc-ca-issuer + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer.yml + local new_result=/tmp/tmp.JPhhnNyild/issuer_some-name-tls-issue-pxc-ca-issuer.yml + desc 'compare issuer/some-name-tls-issue-pxc-ca-issuer-' + set +o xtrace ----------------------------------------------------------------------------------- compare issuer/some-name-tls-issue-pxc-ca-issuer- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-80.yml ']' + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 ++ echo '1.32 >= 1.33' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 ++ echo '1.32 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-k129.yml ']' + version_gt 1.27 ++ bc -l ++ echo '1.32 >= 1.27' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-k127.yml ']' + version_gt 1.24 ++ echo '1.32 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-k124.yml ']' + version_gt 1.22 ++ echo '1.32 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-k122.yml ']' + version_gt 1.21 ++ bc -l ++ echo '1.32 >= 1.21' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-oc.yml ']' + version_gt 1.29 ++ bc -l ++ echo '1.32 >= 1.29' + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer-aks.yml ']' + kubectl_bin get -o yaml issuer/some-name-tls-issue-pxc-ca-issuer ++ mktemp + local LAST_OUT=/tmp/tmp.INinWXTXHB + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. 
| select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.spec.template.spec.containers[].env[] | select(.name == "XTRABACKUP_ENABLED")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.metadata.annotations."kubernetes.digitalocean.com/load-balancer-id") | del(.metadata.annotations."service.beta.kubernetes.io/do-loadbalancer-type") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. 
== "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-15716", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - ++ mktemp + local LAST_ERR=/tmp/tmp.DpCuGhDrFS + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml issuer/some-name-tls-issue-pxc-ca-issuer + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.INinWXTXHB + cat /tmp/tmp.DpCuGhDrFS + rm /tmp/tmp.INinWXTXHB /tmp/tmp.DpCuGhDrFS + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-ca-issuer.yml /tmp/tmp.JPhhnNyild/issuer_some-name-tls-issue-pxc-ca-issuer.yml + log 'compare_kubectl: issuer/some-name-tls-issue-pxc-ca-issuer OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T13:45:06+0000]' compare_kubectl: issuer/some-name-tls-issue-pxc-ca-issuer OK [2026-03-16T13:45:06+0000] compare_kubectl: issuer/some-name-tls-issue-pxc-ca-issuer OK + desc 'check if issuer created' + set +o xtrace ----------------------------------------------------------------------------------- check if issuer created ----------------------------------------------------------------------------------- + compare_kubectl issuer/some-name-tls-issue-pxc-issuer + local resource=issuer/some-name-tls-issue-pxc-issuer + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer.yml + local new_result=/tmp/tmp.JPhhnNyild/issuer_some-name-tls-issue-pxc-issuer.yml + desc 'compare issuer/some-name-tls-issue-pxc-issuer-' + set +o xtrace ----------------------------------------------------------------------------------- compare issuer/some-name-tls-issue-pxc-issuer- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 ++ echo '1.32 >= 1.33' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 ++ echo '1.32 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-k129.yml ']' + version_gt 1.27 ++ bc -l ++ echo '1.32 >= 1.27' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-k127.yml ']' + version_gt 1.24 ++ echo '1.32 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-k124.yml ']' + version_gt 1.22 ++ bc -l ++ echo '1.32 >= 1.22' + '[' 1 -eq 1 ']' + return 0 + '[' -f 
/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-k122.yml ']' + version_gt 1.21 ++ echo '1.32 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-oc.yml ']' + version_gt 1.29 ++ echo '1.32 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer-aks.yml ']' + kubectl_bin get -o yaml issuer/some-name-tls-issue-pxc-issuer + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.spec.template.spec.containers[].env[] | select(.name == "XTRABACKUP_ENABLED")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.metadata.annotations."kubernetes.digitalocean.com/load-balancer-id") | del(.metadata.annotations."service.beta.kubernetes.io/do-loadbalancer-type") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. 
| select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-15716", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. 
| select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - ++ mktemp + local LAST_OUT=/tmp/tmp.uScl6aNZjH ++ mktemp + local LAST_ERR=/tmp/tmp.uG3F9yIUPx + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml issuer/some-name-tls-issue-pxc-issuer + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.uScl6aNZjH + cat /tmp/tmp.uG3F9yIUPx + rm /tmp/tmp.uScl6aNZjH /tmp/tmp.uG3F9yIUPx + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/issuer_some-name-tls-issue-pxc-issuer.yml /tmp/tmp.JPhhnNyild/issuer_some-name-tls-issue-pxc-issuer.yml + log 'compare_kubectl: issuer/some-name-tls-issue-pxc-issuer OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T13:45:07+0000]' compare_kubectl: issuer/some-name-tls-issue-pxc-issuer OK [2026-03-16T13:45:07+0000] compare_kubectl: issuer/some-name-tls-issue-pxc-issuer OK + desc 'check if certificate issued' + set +o xtrace ----------------------------------------------------------------------------------- check if certificate issued ----------------------------------------------------------------------------------- + compare_kubectl certificate/some-name-tls-issue-ssl + local resource=certificate/some-name-tls-issue-ssl + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl.yml + local new_result=/tmp/tmp.JPhhnNyild/certificate_some-name-tls-issue-ssl.yml + desc 'compare certificate/some-name-tls-issue-ssl-' + set +o xtrace ----------------------------------------------------------------------------------- compare certificate/some-name-tls-issue-ssl- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 ++ echo '1.32 >= 1.33' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 ++ echo '1.32 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-k129.yml ']' + version_gt 1.27 ++ bc -l ++ echo '1.32 >= 1.27' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-k127.yml ']' + version_gt 1.24 ++ echo '1.32 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-k124.yml ']' + version_gt 1.22 ++ bc -l ++ echo '1.32 >= 1.22' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-k122.yml ']' + version_gt 1.21 ++ echo '1.32 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-k121.yml ']' + '[' '!' 
-z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-oc.yml ']' + version_gt 1.29 ++ bc -l ++ echo '1.32 >= 1.29' + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl-aks.yml ']' + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.spec.template.spec.containers[].env[] | select(.name == "XTRABACKUP_ENABLED")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.metadata.annotations."kubernetes.digitalocean.com/load-balancer-id") | del(.metadata.annotations."service.beta.kubernetes.io/do-loadbalancer-type") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. 
| select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("tls-issue-cert-manager-15716", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + kubectl_bin get -o yaml certificate/some-name-tls-issue-ssl ++ mktemp + local LAST_OUT=/tmp/tmp.dOcouvDYU0 ++ mktemp + local LAST_ERR=/tmp/tmp.5KLF1ZYemt + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml certificate/some-name-tls-issue-ssl + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dOcouvDYU0 + cat /tmp/tmp.5KLF1ZYemt + rm /tmp/tmp.dOcouvDYU0 /tmp/tmp.5KLF1ZYemt + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/compare/certificate_some-name-tls-issue-ssl.yml /tmp/tmp.JPhhnNyild/certificate_some-name-tls-issue-ssl.yml + log 'compare_kubectl: certificate/some-name-tls-issue-ssl OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T13:45:08+0000]' compare_kubectl: certificate/some-name-tls-issue-ssl OK [2026-03-16T13:45:08+0000] compare_kubectl: certificate/some-name-tls-issue-ssl OK + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue-haproxy.yml + local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue-haproxy.yml + local pvc_name= + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue-haproxy.yml '' + local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue-haproxy.yml + kubectl_bin apply -f - + local pvc_name= ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2399-dbfcca1d#' + local LAST_OUT=/tmp/tmp.KAiNBu49F3 + /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #' ++ mktemp + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.tls-issue-cert-manager-15716~ + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + 
/usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#' + local LAST_ERR=/tmp/tmp.KoK3LbJP7L + local exit_status=0 + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/tls-issue-cert-manager/conf/some-name-tls-issue-haproxy.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.KAiNBu49F3 perconaxtradbcluster.pxc.percona.com/some-name-tls-issue configured + cat /tmp/tmp.KoK3LbJP7L + rm /tmp/tmp.KAiNBu49F3 /tmp/tmp.KoK3LbJP7L + return 0 + wait_for_running some-name-tls-issue-haproxy 1 + local name=some-name-tls-issue-haproxy + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-tls-issue-haproxy-0 480 + local pod=some-name-tls-issue-haproxy-0 + local max_retry=480 + local ns= ++ echo some-name-tls-issue-haproxy-0 ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/some-name-tls-issue-haproxy-0 condition met waiting for pod/some-name-tls-issue-haproxy-0 to become Ready.Ok + wait_cluster_consistency some-name-tls-issue 3 2 + local cluster_name=some-name-tls-issue + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name-tls-issue to be ready' waiting for pxc/some-name-tls-issue to be ready++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YmgFGGN2oM +++ mktemp ++ local LAST_ERR=/tmp/tmp.5aO3qiEKYF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YmgFGGN2oM ++ cat /tmp/tmp.5aO3qiEKYF ++ rm /tmp/tmp.YmgFGGN2oM /tmp/tmp.5aO3qiEKYF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LA5Ot73XiL +++ mktemp ++ local LAST_ERR=/tmp/tmp.wZ9O6IPFER ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LA5Ot73XiL ++ cat /tmp/tmp.wZ9O6IPFER ++ rm /tmp/tmp.LA5Ot73XiL /tmp/tmp.wZ9O6IPFER ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lETRXYb9vO +++ mktemp ++ local LAST_ERR=/tmp/tmp.FYrXEBmnjs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lETRXYb9vO ++ cat /tmp/tmp.FYrXEBmnjs ++ rm /tmp/tmp.lETRXYb9vO /tmp/tmp.FYrXEBmnjs ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iZEOARGnw5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.0BaNp047kR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iZEOARGnw5 ++ cat /tmp/tmp.0BaNp047kR ++ rm /tmp/tmp.iZEOARGnw5 /tmp/tmp.0BaNp047kR ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.f8ewCUUtO9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.cgNixI98ZN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.f8ewCUUtO9 ++ cat /tmp/tmp.cgNixI98ZN ++ rm /tmp/tmp.f8ewCUUtO9 /tmp/tmp.cgNixI98ZN ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name-tls-issue +++ local cluster_name=some-name-tls-issue ++++ get_proxy some-name-tls-issue ++++ local target_cluster=some-name-tls-issue +++++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.xVJSUJecHd ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.qJp9iqXPQN +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.xVJSUJecHd +++++ cat /tmp/tmp.qJp9iqXPQN +++++ rm /tmp/tmp.xVJSUJecHd /tmp/tmp.qJp9iqXPQN +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-tls-issue-haproxy ++++ return +++ local cluster_proxy=some-name-tls-issue-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name-tls-issue -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FWFXaIa1jx +++ mktemp ++ local LAST_ERR=/tmp/tmp.CRpG7HdJ2c ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name-tls-issue -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FWFXaIa1jx ++ cat /tmp/tmp.CRpG7HdJ2c ++ rm /tmp/tmp.FWFXaIa1jx /tmp/tmp.CRpG7HdJ2c ++ return 0 + [[ 2 == \2 ]] + echo + desc 'check ssl-internal certificate using PXC' + set +o xtrace ----------------------------------------------------------------------------------- check ssl-internal certificate using PXC ----------------------------------------------------------------------------------- + check_verify_identity some-name-tls-issue-pxc + local host=some-name-tls-issue-pxc + local command=exit + local 'args=--ssl-ca=/etc/mysql/ssl-internal/ca.crt --ssl-mode=VERIFY_IDENTITY --protocol=tcp -uroot -proot_password --host=some-name-tls-issue-pxc' + kubectl_bin exec 
some-name-tls-issue-pxc-0 -- bash -c 'printf '\''%s\n'\'' "exit" | mysql -sN --ssl-ca=/etc/mysql/ssl-internal/ca.crt --ssl-mode=VERIFY_IDENTITY --protocol=tcp -uroot -proot_password --host=some-name-tls-issue-pxc' ++ mktemp + local LAST_OUT=/tmp/tmp.txm5WVactE ++ mktemp + local LAST_ERR=/tmp/tmp.dyPso0PTMa + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec some-name-tls-issue-pxc-0 -- bash -c 'printf '\''%s\n'\'' "exit" | mysql -sN --ssl-ca=/etc/mysql/ssl-internal/ca.crt --ssl-mode=VERIFY_IDENTITY --protocol=tcp -uroot -proot_password --host=some-name-tls-issue-pxc' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.txm5WVactE + cat /tmp/tmp.dyPso0PTMa mysql: [Warning] Using a password on the command line interface can be insecure. + rm /tmp/tmp.txm5WVactE /tmp/tmp.dyPso0PTMa + return 0 + desc 'check ssl-internal certificate using HAProxy' + set +o xtrace ----------------------------------------------------------------------------------- check ssl-internal certificate using HAProxy ----------------------------------------------------------------------------------- + check_verify_identity some-name-tls-issue-haproxy + local host=some-name-tls-issue-haproxy + local command=exit + local 'args=--ssl-ca=/etc/mysql/ssl-internal/ca.crt --ssl-mode=VERIFY_IDENTITY --protocol=tcp -uroot -proot_password --host=some-name-tls-issue-haproxy' + kubectl_bin exec some-name-tls-issue-pxc-0 -- bash -c 'printf '\''%s\n'\'' "exit" | mysql -sN --ssl-ca=/etc/mysql/ssl-internal/ca.crt --ssl-mode=VERIFY_IDENTITY --protocol=tcp -uroot -proot_password --host=some-name-tls-issue-haproxy' ++ mktemp + local LAST_OUT=/tmp/tmp.ptC4CJz3mM ++ mktemp + local LAST_ERR=/tmp/tmp.3myftDws38 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl exec some-name-tls-issue-pxc-0 -- bash -c 'printf '\''%s\n'\'' "exit" | mysql -sN --ssl-ca=/etc/mysql/ssl-internal/ca.crt --ssl-mode=VERIFY_IDENTITY --protocol=tcp -uroot -proot_password --host=some-name-tls-issue-haproxy' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ptC4CJz3mM + cat /tmp/tmp.3myftDws38 mysql: [Warning] Using a password on the command line interface can be insecure. 
+ rm /tmp/tmp.ptC4CJz3mM /tmp/tmp.3myftDws38 + return 0 + destroy tls-issue-cert-manager-15716 + local namespace=tls-issue-cert-manager-15716 + local ignore_logs=true + [[ 0 == 1 ]] + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + grep -v 'the object has been modified' + grep -v 'get backup status: Job.batch' ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ + sort -u + tee /tmp/tmp.JPhhnNyild/operator.log +++ grep -c percona-xtradb-cluster-operator +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator ++ jq -r '.items[] | select(.metadata.deletionTimestamp == null) | .metadata.name' ++ head -1 +++ mktemp ++ local LAST_OUT=/tmp/tmp.tNjIOMV21h +++ mktemp ++ local LAST_ERR=/tmp/tmp.0ibGGWV652 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tNjIOMV21h ++ cat /tmp/tmp.0ibGGWV652 ++ rm /tmp/tmp.tNjIOMV21h /tmp/tmp.0ibGGWV652 ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-c7445dd5-pmn7f ++ mktemp + local LAST_OUT=/tmp/tmp.cZzFfs22Xe ++ mktemp + local LAST_ERR=/tmp/tmp.PkUnYfOMOU + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-c7445dd5-pmn7f + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.cZzFfs22Xe + cat /tmp/tmp.PkUnYfOMOU + rm /tmp/tmp.cZzFfs22Xe /tmp/tmp.PkUnYfOMOU + return 0 2026-03-16T13:37:00.909Z INFO setup Feature gates {"PXCO_FEATURE_GATES": "", "enabled": ""} 2026-03-16T13:37:00.909Z INFO setup Manager starting up {"gitCommit": "dbfcca1d524d8d638feb3a7673ad33f836c01f0b", "gitBranch": "PR-2399-dbfcca1d", "buildTime": "2026-03-16T10:07:24Z", "goVersion": "go1.25.8", "os": "linux", "arch": "amd64"} 2026-03-16T13:37:00.909Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.32.13-gke.1059000"} 2026-03-16T13:37:00.912Z INFO setup Registering Components. 2026-03-16T13:37:01.592Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2026-03-16T13:37:01.592Z INFO controller-runtime.metrics Starting metrics server 2026-03-16T13:37:01.592Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2026-03-16T13:37:01.592Z INFO controller-runtime.webhook Starting webhook server 2026-03-16T13:37:01.592Z INFO setup Starting the Cmd. 
2026-03-16T13:37:01.592Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2026-03-16T13:37:01.593Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"} 2026-03-16T13:37:01.593Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"} 2026-03-16T13:37:01.593Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2026-03-16T13:37:01.694Z INFO Attempting to acquire leader lease... {"lock": "pxc-operator/08db1feb.percona.com"} 2026-03-16T13:37:01.722Z DEBUG events percona-xtradb-cluster-operator-c7445dd5-pmn7f_a81264e3-ad82-4b5a-bf2e-749d6475755b became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"a0b9fd0c-c6bf-41ba-b11c-ee1012600bd6","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1773668221717039009"}, "reason": "LeaderElection"} 2026-03-16T13:37:01.722Z INFO Starting EventSource {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "source": "kind source: *v1.Secret"} 2026-03-16T13:37:01.722Z INFO Starting EventSource {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2026-03-16T13:37:01.722Z INFO Successfully acquired lease {"lock": "pxc-operator/08db1feb.percona.com"} 2026-03-16T13:37:01.723Z INFO Starting EventSource {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2026-03-16T13:37:01.723Z INFO Starting EventSource {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "source": "kind source: *v1.PerconaXtraDBCluster"} 2026-03-16T13:37:01.823Z INFO Starting Controller {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster"} 2026-03-16T13:37:01.823Z INFO Starting Controller {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore"} 2026-03-16T13:37:01.823Z INFO Starting workers {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "worker count": 1} 2026-03-16T13:37:01.823Z INFO Starting workers {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore", "worker count": 1} 2026-03-16T13:37:01.824Z INFO Starting Controller {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup"} 2026-03-16T13:37:01.824Z INFO Starting workers {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup", "worker count": 1} 2026-03-16T13:39:08.592Z INFO Set CR version {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": 
"c30ef8fd-2dd8-48a3-8ed7-5835b7601707", "version": "1.20.0"} 2026-03-16T13:39:09.095Z INFO spec.privateKey.rotationPolicy: In cert-manager >= v1.18.0, the default value changed from `Never` to `Always`. {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "c30ef8fd-2dd8-48a3-8ed7-5835b7601707"} 2026-03-16T13:39:12.151Z INFO spec.privateKey.rotationPolicy: In cert-manager >= v1.18.0, the default value changed from `Never` to `Always`. {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "c30ef8fd-2dd8-48a3-8ed7-5835b7601707"} 2026-03-16T13:39:12.177Z INFO spec.privateKey.rotationPolicy: In cert-manager >= v1.18.0, the default value changed from `Never` to `Always`. {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "c30ef8fd-2dd8-48a3-8ed7-5835b7601707"} 2026-03-16T13:39:15.279Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "c30ef8fd-2dd8-48a3-8ed7-5835b7601707", "object": "auto-some-name-tls-issue-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2026-03-16T13:39:15.403Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "c30ef8fd-2dd8-48a3-8ed7-5835b7601707", "object": "some-name-tls-issue-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2026-03-16T13:39:15.451Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "c30ef8fd-2dd8-48a3-8ed7-5835b7601707", "object": "some-name-tls-issue-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2026-03-16T13:39:15.511Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "c30ef8fd-2dd8-48a3-8ed7-5835b7601707", "object": "some-name-tls-issue-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-03-16T13:39:15.542Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": 
"pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "c30ef8fd-2dd8-48a3-8ed7-5835b7601707", "object": "some-name-tls-issue-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-03-16T13:39:15.598Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "c30ef8fd-2dd8-48a3-8ed7-5835b7601707", "object": "some-name-tls-issue-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-03-16T13:39:15.704Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "c30ef8fd-2dd8-48a3-8ed7-5835b7601707", "object": "some-name-tls-issue-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-03-16T13:39:16.650Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "6eda2799-f036-43d6-98f8-c36eedae85b4", "object": "some-name-tls-issue-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2026-03-16T13:39:16.672Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "6eda2799-f036-43d6-98f8-c36eedae85b4", "object": "some-name-tls-issue-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2026-03-16T13:40:38.628Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "9c38a061-7456-4031-bddd-66dd18a9a277", "user": "operator"} 2026-03-16T13:40:38.659Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "9c38a061-7456-4031-bddd-66dd18a9a277", "user": "monitor"} 2026-03-16T13:40:38.706Z INFO User monitor: granted privileges {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": 
"some-name-tls-issue", "reconcileID": "9c38a061-7456-4031-bddd-66dd18a9a277"} 2026-03-16T13:40:38.739Z INFO monitor user privileges granted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "9c38a061-7456-4031-bddd-66dd18a9a277"} 2026-03-16T13:40:38.772Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "9c38a061-7456-4031-bddd-66dd18a9a277", "user": "xtrabackup"} 2026-03-16T13:40:38.814Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "9c38a061-7456-4031-bddd-66dd18a9a277"} 2026-03-16T13:40:38.854Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "9c38a061-7456-4031-bddd-66dd18a9a277", "user": "replication"} 2026-03-16T13:40:38.862Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "9c38a061-7456-4031-bddd-66dd18a9a277", "err": "get primary pxc pod: not found"} 2026-03-16T13:40:44.021Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "bc69165a-b986-42a6-bbeb-d8b5ee19ef56", "err": "get primary pxc pod: not found"} 2026-03-16T13:40:49.150Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "352000a6-b32b-47bc-ac93-2094bc8384d4", "err": "get primary pxc pod: not found"} 2026-03-16T13:40:54.285Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "4f3fb7ee-2c90-4dec-b89c-d2244625ebf7", "err": "get primary pxc pod: not found"} 2026-03-16T13:43:20.883Z INFO 
Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "b0ca8cd9-4b6b-4639-bdf1-f24cacefda72", "user": "root"} 2026-03-16T13:43:21.027Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "b0ca8cd9-4b6b-4639-bdf1-f24cacefda72", "new version": "8.0.43-34.1"} 2026-03-16T13:43:22.356Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "b0ca8cd9-4b6b-4639-bdf1-f24cacefda72"} 2026-03-16T13:43:29.298Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "fe018002-74bc-488b-ba57-8f96722ffd89"} 2026-03-16T13:43:34.492Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "10d71c72-8535-4559-bada-4ab687682564"} 2026-03-16T13:43:39.992Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "6cee01e8-9bd6-464b-bc85-e50cecaa1e59"} 2026-03-16T13:43:45.063Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "f282fdfe-4fd5-4701-bcfa-f1e6ccd2f7b8"} 2026-03-16T13:43:50.395Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "b0eb0d50-9b91-4c2c-828f-ae1c8b44ecf1"} 2026-03-16T13:43:55.974Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": 
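The "update PXC version (fetched from db)" entry above shows the operator recording the detected server version (8.0.43-34.1) in the custom resource status. If needed, that value can be read back with a command like the following; the exact status path is an assumption and may differ between operator releases:

    kubectl -n tls-issue-cert-manager-15716 get pxc some-name-tls-issue -o jsonpath='{.status.pxc.version}{"\n"}'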
"tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "743e7026-2a13-47b1-8d54-71198b9d3204"} 2026-03-16T13:44:01.297Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "0a832624-9ef8-4fa7-81cf-5ce5262b479f"} 2026-03-16T13:44:06.489Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "b3cf590b-e46a-490e-bbd2-4be7a053751e"} 2026-03-16T13:44:11.875Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "3b2f8a8c-f032-49b0-8ae0-4702110f199a"} 2026-03-16T13:44:17.168Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "319b3da0-3a9e-4d10-a410-94cf018767c1"} 2026-03-16T13:44:22.375Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "bc5e1e84-29dd-4210-b8af-45e32e429736"} 2026-03-16T13:44:27.690Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "57a641c2-3c27-49be-8825-884200d688e0"} 2026-03-16T13:44:33.001Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "14502dd0-9d61-47ad-9981-5ff63f1b6b38"} 2026-03-16T13:44:38.255Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "87ab7a50-d54e-49ab-babf-e7c94724afc6"} 2026-03-16T13:44:43.490Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", 
"PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "f931453d-1298-49b3-bc48-41011be90ea1"} 2026-03-16T13:44:48.571Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "926b13de-1dce-4895-a599-0afc611f7ba5"} 2026-03-16T13:44:54.019Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "aee103e9-a837-4952-aa47-8480cc4b57de"} 2026-03-16T13:44:59.600Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "244a2446-5d65-44c1-88e9-b64a9dabe08e"} 2026-03-16T13:45:05.060Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "d93ae3df-1004-42cb-92ec-cc118b8813f0"} 2026-03-16T13:45:10.163Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "dbbafa4b-cd71-48bb-bfbb-93d779cfb421"} 2026-03-16T13:45:12.916Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "432ca9b7-e038-4faf-83ec-dbca830d363b", "object": "some-name-tls-issue-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2026-03-16T13:45:12.989Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "432ca9b7-e038-4faf-83ec-dbca830d363b", "object": "some-name-tls-issue-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2026-03-16T13:45:13.058Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": 
"some-name-tls-issue", "reconcileID": "432ca9b7-e038-4faf-83ec-dbca830d363b", "object": "some-name-tls-issue-haproxy", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-03-16T13:45:13.133Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "432ca9b7-e038-4faf-83ec-dbca830d363b", "object": "some-name-tls-issue-haproxy-replicas", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-03-16T13:45:15.597Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "432ca9b7-e038-4faf-83ec-dbca830d363b", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 34.118.239.55:3306: connect: connection refused"} 2026-03-16T13:45:16.305Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "bff69f54-604b-4a53-96e7-a4ce99f82d13", "object": "some-name-tls-issue-haproxy", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2026-03-16T13:45:23.331Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "bff69f54-604b-4a53-96e7-a4ce99f82d13", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 34.118.239.55:3306: connect: connection refused"} 2026-03-16T13:45:30.545Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "a6e50c0f-cc6c-4dbb-9e9d-cc557ecb4b05", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 34.118.239.55:3306: connect: connection refused"} 2026-03-16T13:45:37.741Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name-tls-issue","namespace":"tls-issue-cert-manager-15716"}, "namespace": "tls-issue-cert-manager-15716", "name": "some-name-tls-issue", "reconcileID": "5aaf13ad-3dad-48db-9107-451862d42db2", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 34.118.239.55:3306: connect: connection refused"} + kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE + kubectl patch pxc -n tls-issue-cert-manager-15716 some-name-tls-issue --type=merge -p '{"metadata":{"finalizers":[]}}' 
perconaxtradbcluster.pxc.percona.com/some-name-tls-issue patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.x6t8q0Vfl7 ++ mktemp + local LAST_ERR=/tmp/tmp.7MEluWukPD + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.x6t8q0Vfl7 perconaxtradbcluster.pxc.percona.com "some-name-tls-issue" deleted from tls-issue-cert-manager-15716 namespace + cat /tmp/tmp.7MEluWukPD + rm /tmp/tmp.x6t8q0Vfl7 /tmp/tmp.7MEluWukPD + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.TYt8UxnN0B ++ mktemp + local LAST_ERR=/tmp/tmp.FiTELiw1iF + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.TYt8UxnN0B No resources found + cat /tmp/tmp.FiTELiw1iF + rm /tmp/tmp.TYt8UxnN0B /tmp/tmp.FiTELiw1iF + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.YLCyM9aMzb ++ mktemp + local LAST_ERR=/tmp/tmp.CaBwlveegu + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.YLCyM9aMzb No resources found + cat /tmp/tmp.CaBwlveegu + rm /tmp/tmp.YLCyM9aMzb /tmp/tmp.CaBwlveegu + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.gchaQqT7hk ++ mktemp + local LAST_ERR=/tmp/tmp.s4JMADFjFW + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.gchaQqT7hk validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.s4JMADFjFW + rm /tmp/tmp.gchaQqT7hk /tmp/tmp.s4JMADFjFW + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.19.2/cert-manager.yaml namespace "cert-manager" deleted + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace tls-issue-cert-manager-15716 + rm -rf /tmp/tmp.JPhhnNyild ++ mktemp + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp + desc 'test passed' + set +o xtrace + local LAST_OUT=/tmp/tmp.CKt2qXsfzp ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- ++ mktemp + local LAST_OUT=/tmp/tmp.AOc0udDcBF ++ mktemp + local LAST_ERR=/tmp/tmp.gcFDrEmRGI + local exit_status=0 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.PoDeh3KjiS + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace tls-issue-cert-manager-15716 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator
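The delete commands in this cleanup phase all run through the test framework's kubectl_bin helper, visible in the trace as the mktemp / seq 0 2 / set +e pattern. A minimal sketch of what that wrapper appears to do, reconstructed only from the trace above; the real helper is defined in the test framework's functions file and may differ in details such as back-off and error routing:

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status i
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do          # up to three attempts, as seen in the trace
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            [ "$exit_status" -eq 0 ] && break
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR" >&2
        rm "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }

Capturing stdout and stderr in temp files and printing them only after the retry loop settles keeps the Jenkins console output readable even when an attempt fails and is retried.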