Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/logs/proxysql-scheduler-8-0.log Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1 + cluster=some-name + create_infra proxysql-scheduler-20685 + local ns=proxysql-scheduler-20685 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n proxysql-scheduler-8139 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.NNAgoIzCr9 ++ mktemp + local LAST_ERR=/tmp/tmp.vtgwou8l5o + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.NNAgoIzCr9 perconaxtradbcluster.pxc.percona.com "some-name" deleted from proxysql-scheduler-8139 namespace + cat /tmp/tmp.vtgwou8l5o + rm /tmp/tmp.NNAgoIzCr9 /tmp/tmp.vtgwou8l5o + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.dyGOclO3CS ++ mktemp + local LAST_ERR=/tmp/tmp.cMfDoOYI3b + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dyGOclO3CS No resources found + cat /tmp/tmp.cMfDoOYI3b + rm /tmp/tmp.dyGOclO3CS /tmp/tmp.cMfDoOYI3b + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.lwbwtzvvTn ++ mktemp + local LAST_ERR=/tmp/tmp.AFpIjIFXB3 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lwbwtzvvTn No resources found + cat /tmp/tmp.AFpIjIFXB3 + rm /tmp/tmp.lwbwtzvvTn /tmp/tmp.AFpIjIFXB3 + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ grep chaos-mesh ++ kubectl get ValidatingWebhookConfiguration + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep chaos-mesh ++ kubectl api-resources ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ grep chaos-mesh ++ kubectl get clusterrolebinding ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk 
'{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator + xargs kubectl delete ns ++ mktemp + awk '{print$1}' + local LAST_OUT=/tmp/tmp.rTo52rnuSQ ++ mktemp + local LAST_ERR=/tmp/tmp.vHu6JIZEPI + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.1xMUA5pqZv ++ mktemp + local LAST_ERR=/tmp/tmp.5HsXOaUzAD + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1xMUA5pqZv + cat /tmp/tmp.5HsXOaUzAD + rm /tmp/tmp.1xMUA5pqZv /tmp/tmp.5HsXOaUzAD + return 0 namespace "proxysql-scheduler-8139" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.rTo52rnuSQ namespace "pxc-operator" deleted + cat /tmp/tmp.vHu6JIZEPI + rm /tmp/tmp.rTo52rnuSQ /tmp/tmp.vHu6JIZEPI + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'waiting for namespace/pxc-operator to be deleted' waiting for namespace/pxc-operator to be deleted+ set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.9MmOqrEJko ++ mktemp + local LAST_ERR=/tmp/tmp.z4PJx9OrRo + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.9MmOqrEJko namespace/pxc-operator created + cat /tmp/tmp.z4PJx9OrRo + rm /tmp/tmp.9MmOqrEJko /tmp/tmp.z4PJx9OrRo + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.JqNhMEA9Wt +++ mktemp ++ local LAST_ERR=/tmp/tmp.rKq6EujxKr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JqNhMEA9Wt ++ cat /tmp/tmp.rKq6EujxKr ++ rm /tmp/tmp.JqNhMEA9Wt /tmp/tmp.rKq6EujxKr ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2399-dbfcca1d-3-cluster7 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.PCvbb6ICzn ++ mktemp + local LAST_ERR=/tmp/tmp.GHGIBpgMRu + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2399-dbfcca1d-3-cluster7 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.PCvbb6ICzn Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2399-dbfcca1d-3-cluster7" 
modified. + cat /tmp/tmp.GHGIBpgMRu + rm /tmp/tmp.PCvbb6ICzn /tmp/tmp.GHGIBpgMRu + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.T0nrjBsUWG ++ mktemp + local LAST_ERR=/tmp/tmp.O4TRnZLSlL + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.T0nrjBsUWG customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.O4TRnZLSlL + rm /tmp/tmp.T0nrjBsUWG /tmp/tmp.O4TRnZLSlL + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + sed -e 's^namespace: .*^namespace: pxc-operator^' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/deploy/cw-rbac.yaml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.FUYA41hY6S ++ mktemp + local LAST_ERR=/tmp/tmp.QuLWuTQuYq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.FUYA41hY6S clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.QuLWuTQuYq + rm /tmp/tmp.FUYA41hY6S /tmp/tmp.QuLWuTQuYq + return 0 + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2399-dbfcca1d^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/deploy/cw-operator.yaml + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "PXCO_FEATURE_GATES").value) = ""' - + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.jglbdApJUS ++ mktemp + local LAST_ERR=/tmp/tmp.vlIflwA2ss + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jglbdApJUS deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.vlIflwA2ss + rm /tmp/tmp.jglbdApJUS /tmp/tmp.vlIflwA2ss + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ 
mktemp + local LAST_OUT=/tmp/tmp.QV6y8O2feZ ++ mktemp + local LAST_ERR=/tmp/tmp.Rp4bnRTNlB + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.QV6y8O2feZ pod/percona-xtradb-cluster-operator-c7445dd5-gw2cb condition met + cat /tmp/tmp.Rp4bnRTNlB + rm /tmp/tmp.QV6y8O2feZ /tmp/tmp.Rp4bnRTNlB + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ jq -r '.items[] | select(.metadata.deletionTimestamp == null) | .metadata.name' ++ head -1 ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.wc25lADAvQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.mlivuKVH2T ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wc25lADAvQ ++ cat /tmp/tmp.mlivuKVH2T ++ rm /tmp/tmp.wc25lADAvQ /tmp/tmp.mlivuKVH2T ++ return 0 + wait_pod percona-xtradb-cluster-operator-c7445dd5-gw2cb 480 pxc-operator + local pod=percona-xtradb-cluster-operator-c7445dd5-gw2cb + local max_retry=480 + local ns=pxc-operator ++ grep -E '^(pxc|proxysql)$' ++ echo percona-xtradb-cluster-operator-c7445dd5-gw2cb ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-c7445dd5-gw2cb condition met waiting for pod/percona-xtradb-cluster-operator-c7445dd5-gw2cb to become Ready.Ok + sleep 3 + create_namespace proxysql-scheduler-20685 + local namespace=proxysql-scheduler-20685 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ awk '{print $1}' ++ grep chaos-mesh ++ kubectl get crd ++ awk '{print $1}' ++ grep chaos-mesh.org + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ kubectl get clusterrole ++ grep chaos-mesh + timeout 30 kubectl delete clusterrole error: resource(s) were 
provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + '[' -n '' ']' + desc 'cleaned up old namespaces proxysql-scheduler-20685' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces proxysql-scheduler-20685 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace proxysql-scheduler-20685 + xargs kubectl delete ns + grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + kubectl_bin get ns ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.0gAyHDStCN + local LAST_OUT=/tmp/tmp.sUHYYnd4t2 ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.4GC1aYi8Oc + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + awk '{print$1}' + local LAST_ERR=/tmp/tmp.Rhzf7Yfa6u + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace proxysql-scheduler-20685 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace proxysql-scheduler-20685 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.0gAyHDStCN + cat /tmp/tmp.4GC1aYi8Oc + rm /tmp/tmp.0gAyHDStCN /tmp/tmp.4GC1aYi8Oc + return 0 error: resource(s) were provided, but no name was specified + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace proxysql-scheduler-20685 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.sUHYYnd4t2 + cat /tmp/tmp.Rhzf7Yfa6u Error from server (NotFound): namespaces "proxysql-scheduler-20685" not found + rm /tmp/tmp.sUHYYnd4t2 /tmp/tmp.Rhzf7Yfa6u + return 1 + : + wait_for_delete namespace/proxysql-scheduler-20685 + local res=namespace/proxysql-scheduler-20685 + echo -n 'waiting for namespace/proxysql-scheduler-20685 to be deleted' waiting for namespace/proxysql-scheduler-20685 to be deleted+ set +o xtrace Error from server (NotFound): namespaces "proxysql-scheduler-20685" not found + desc 'create namespace proxysql-scheduler-20685' + set +o xtrace ----------------------------------------------------------------------------------- create namespace proxysql-scheduler-20685 ----------------------------------------------------------------------------------- + kubectl_bin create namespace proxysql-scheduler-20685 ++ mktemp + local LAST_OUT=/tmp/tmp.BpOyObxm0d ++ mktemp + local LAST_ERR=/tmp/tmp.mnzl3bjRss + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace proxysql-scheduler-20685 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BpOyObxm0d namespace/proxysql-scheduler-20685 created + cat /tmp/tmp.mnzl3bjRss + rm /tmp/tmp.BpOyObxm0d /tmp/tmp.mnzl3bjRss + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.v3l5FHPfQ8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.9mVQxmUr6L ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.v3l5FHPfQ8 ++ cat /tmp/tmp.9mVQxmUr6L ++ rm /tmp/tmp.v3l5FHPfQ8 /tmp/tmp.9mVQxmUr6L ++ return 0 + kubectl_bin config set-context 
gke_cloud-dev-112233_us-central1-a_jen-pxc-2399-dbfcca1d-3-cluster7 --namespace=proxysql-scheduler-20685 ++ mktemp + local LAST_OUT=/tmp/tmp.r5aro5wm4Q ++ mktemp + local LAST_ERR=/tmp/tmp.Y60k6aPnBy + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2399-dbfcca1d-3-cluster7 --namespace=proxysql-scheduler-20685 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.r5aro5wm4Q Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2399-dbfcca1d-3-cluster7" modified. + cat /tmp/tmp.Y60k6aPnBy + rm /tmp/tmp.r5aro5wm4Q /tmp/tmp.Y60k6aPnBy + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.SmGXqjNk5X ++ mktemp + local LAST_ERR=/tmp/tmp.n8NDKvp2fo + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.SmGXqjNk5X secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.n8NDKvp2fo + rm /tmp/tmp.SmGXqjNk5X /tmp/tmp.n8NDKvp2fo + return 0 + desc 'create PXC cluster: some-name' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster: some-name ----------------------------------------------------------------------------------- + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.9LdtmO0PE6 ++ mktemp + local LAST_ERR=/tmp/tmp.TiPS8xhLYW + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.9LdtmO0PE6 secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.TiPS8xhLYW + rm /tmp/tmp.9LdtmO0PE6 /tmp/tmp.TiPS8xhLYW + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/conf/client.yml + local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/conf/client.yml 
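Every kubectl call above goes through the suite's kubectl_bin wrapper rather than raw kubectl: the trace shows it capturing stdout and stderr into mktemp files, retrying the command up to three times (seq 0 2), then replaying the captured output and removing the temp files. A minimal sketch of the pattern as it can be read off this trace follows; the back-off between attempts is inferred from the 'sleep 0' lines and may differ from the real helper in e2e-tests/functions:

kubectl_bin() {
	local LAST_OUT LAST_ERR
	local exit_status=0
	LAST_OUT=$(mktemp)
	LAST_ERR=$(mktemp)
	for i in $(seq 0 2); do                  # up to three attempts, as in the trace
		set +e
		kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
		exit_status=$?
		set -e
		if [ $exit_status != 0 ]; then
			sleep "$i"                       # assumed back-off; the trace only shows 'sleep 0'
		else
			break
		fi
	done
	cat "$LAST_OUT"                          # replay captured stdout
	cat "$LAST_ERR" >&2                      # replay captured stderr
	rm "$LAST_OUT" "$LAST_ERR"
	return $exit_status
}

This is why every command in the log is bracketed by a pair of mktemp calls and a matching cat/rm of /tmp/tmp.* files, and why transient failures (such as the NotFound on the namespace deletion later in the log) are retried before the wrapper gives up and returns non-zero.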
+ local pvc_name= + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/conf/client.yml '' + local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/conf/client.yml + local pvc_name= + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.proxysql-scheduler-20685~ + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + local LAST_OUT=/tmp/tmp.HzkPAwO0a5 + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2399-dbfcca1d#' + /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #' ++ mktemp + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_ERR=/tmp/tmp.HM6iLIz5mG + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.HzkPAwO0a5 deployment.apps/pxc-client created + cat /tmp/tmp.HM6iLIz5mG + rm /tmp/tmp.HzkPAwO0a5 /tmp/tmp.HM6iLIz5mG + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/conf/some-name.yml + local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/conf/some-name.yml + local pvc_name= + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/conf/some-name.yml '' + kubectl_bin apply -f - + local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/conf/some-name.yml + local pvc_name= ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2399-dbfcca1d#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.proxysql-scheduler-20685~ + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#' + local LAST_OUT=/tmp/tmp.oa2D88ioEV + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' ++ mktemp + local LAST_ERR=/tmp/tmp.7EXC6W83Iy + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl 
apply -f - + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/conf/some-name.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.oa2D88ioEV perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.7EXC6W83Iy + rm /tmp/tmp.oa2D88ioEV /tmp/tmp.7EXC6W83Iy + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.q2eARdrE1s ++++ mktemp +++ local LAST_ERR=/tmp/tmp.SdWK6ubwbG +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.q2eARdrE1s +++ cat /tmp/tmp.SdWK6ubwbG +++ rm /tmp/tmp.q2eARdrE1s /tmp/tmp.SdWK6ubwbG +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.kiDs42Q1ZZ ++++ mktemp +++ local LAST_ERR=/tmp/tmp.o0bh03Pci5 +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.kiDs42Q1ZZ +++ cat /tmp/tmp.o0bh03Pci5 +++ rm /tmp/tmp.kiDs42Q1ZZ /tmp/tmp.o0bh03Pci5 +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n proxysql-scheduler-20685 ++ mktemp + local LAST_OUT=/tmp/tmp.MB5k5zblfQ ++ mktemp + local LAST_ERR=/tmp/tmp.DvbNbIvmLv + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n proxysql-scheduler-20685 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n proxysql-scheduler-20685 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n proxysql-scheduler-20685 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.MB5k5zblfQ + cat /tmp/tmp.DvbNbIvmLv error: no matching resources found + rm /tmp/tmp.MB5k5zblfQ /tmp/tmp.DvbNbIvmLv + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster 
----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met waiting for pod/some-name-proxysql-0 to become Ready.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo some-name-pxc-0 ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met waiting for pod/some-name-pxc-0 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo some-name-pxc-1 + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met waiting for pod/some-name-pxc-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' ++ echo some-name-pxc-2 + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met waiting for pod/some-name-pxc-2 to become Ready.Ok + sleep 10 ++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}' + local secret_name=my-cluster-secrets ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.En7bMnjRQ9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ULk6YiGWMB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.En7bMnjRQ9 ++ cat /tmp/tmp.ULk6YiGWMB ++ rm /tmp/tmp.En7bMnjRQ9 /tmp/tmp.ULk6YiGWMB ++ return 0 + local root_pass=root_password + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -p'\''root_password'\'' -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vmXjgGGjNt +++ mktemp ++ local LAST_ERR=/tmp/tmp.0ot81yhcMI ++ local exit_status=0 +++ seq 0 2 ++ for i in 
'$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vmXjgGGjNt ++ cat /tmp/tmp.0ot81yhcMI ++ rm /tmp/tmp.vmXjgGGjNt /tmp/tmp.0ot81yhcMI ++ return 0 + client_pod=pxc-client-67fc4995bb-zdm84 + wait_pod pxc-client-67fc4995bb-zdm84 + local pod=pxc-client-67fc4995bb-zdm84 + local max_retry=480 + local ns= ++ echo pxc-client-67fc4995bb-zdm84 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-67fc4995bb-zdm84 condition met waiting for pod/pxc-client-67fc4995bb-zdm84 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -p'\''root_password'\'' -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.484E6hgNna +++ mktemp ++ local LAST_ERR=/tmp/tmp.TF2XBI3H76 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.484E6hgNna ++ cat /tmp/tmp.TF2XBI3H76 ++ rm /tmp/tmp.484E6hgNna /tmp/tmp.TF2XBI3H76 ++ return 0 + client_pod=pxc-client-67fc4995bb-zdm84 + wait_pod pxc-client-67fc4995bb-zdm84 + local pod=pxc-client-67fc4995bb-zdm84 + local max_retry=480 + local ns= ++ echo pxc-client-67fc4995bb-zdm84 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-67fc4995bb-zdm84 condition met waiting for pod/pxc-client-67fc4995bb-zdm84 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8ildAjl5Ts +++ mktemp ++ local LAST_ERR=/tmp/tmp.Xtmf583gg8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8ildAjl5Ts ++ cat 
/tmp/tmp.Xtmf583gg8 ++ rm /tmp/tmp.8ildAjl5Ts /tmp/tmp.Xtmf583gg8 ++ return 0 + client_pod=pxc-client-67fc4995bb-zdm84 + wait_pod pxc-client-67fc4995bb-zdm84 + local pod=pxc-client-67fc4995bb-zdm84 + local max_retry=480 + local ns= ++ echo pxc-client-67fc4995bb-zdm84 ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-67fc4995bb-zdm84 condition met waiting for pod/pxc-client-67fc4995bb-zdm84 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.zoZKt1BYg2/select-1.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.zoZKt1BYg2/select-1.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/select-1.sql /tmp/tmp.zoZKt1BYg2/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4qErag8b6D +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZFqLffXFGM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4qErag8b6D ++ cat /tmp/tmp.ZFqLffXFGM ++ rm /tmp/tmp.4qErag8b6D /tmp/tmp.ZFqLffXFGM ++ return 0 + client_pod=pxc-client-67fc4995bb-zdm84 + wait_pod pxc-client-67fc4995bb-zdm84 + local pod=pxc-client-67fc4995bb-zdm84 + local max_retry=480 + local ns= ++ echo pxc-client-67fc4995bb-zdm84 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-67fc4995bb-zdm84 condition met waiting for pod/pxc-client-67fc4995bb-zdm84 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.zoZKt1BYg2/select-1.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.zoZKt1BYg2/select-1.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/select-1.sql /tmp/tmp.zoZKt1BYg2/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oExgSx2mST +++ mktemp ++ local LAST_ERR=/tmp/tmp.K2Z1kwjJI7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oExgSx2mST ++ cat /tmp/tmp.K2Z1kwjJI7 ++ rm /tmp/tmp.oExgSx2mST /tmp/tmp.K2Z1kwjJI7 ++ return 0 + client_pod=pxc-client-67fc4995bb-zdm84 + wait_pod pxc-client-67fc4995bb-zdm84 + local pod=pxc-client-67fc4995bb-zdm84 + local max_retry=480 + local ns= ++ grep -E '^(pxc|proxysql)$' ++ echo pxc-client-67fc4995bb-zdm84 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-67fc4995bb-zdm84 condition met waiting for pod/pxc-client-67fc4995bb-zdm84 to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.zoZKt1BYg2/select-1.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.zoZKt1BYg2/select-1.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/select-1.sql /tmp/tmp.zoZKt1BYg2/select-1.sql + is_keyring_plugin_in_use some-name + local cluster=some-name + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + kubectl exec some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' + grep -E -o 'early-plugin-load=keyring_\w+.so' + return 1 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.juVobgkDgR +++ mktemp ++ local LAST_ERR=/tmp/tmp.Xi2CWF5h9f ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.juVobgkDgR ++ cat /tmp/tmp.Xi2CWF5h9f ++ rm /tmp/tmp.juVobgkDgR /tmp/tmp.Xi2CWF5h9f ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7XAWzct5Tc +++ mktemp ++ local LAST_ERR=/tmp/tmp.vSyRcSYEih ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7XAWzct5Tc ++ cat /tmp/tmp.vSyRcSYEih ++ rm /tmp/tmp.7XAWzct5Tc /tmp/tmp.vSyRcSYEih ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.eEEzgcWcxP ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.f0e9z0I3lB +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.eEEzgcWcxP +++++ cat /tmp/tmp.f0e9z0I3lB +++++ rm /tmp/tmp.eEEzgcWcxP /tmp/tmp.f0e9z0I3lB +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.M4UJ88Edg5 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.5Joze4gS6z +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.M4UJ88Edg5 +++++ cat /tmp/tmp.5Joze4gS6z +++++ rm /tmp/tmp.M4UJ88Edg5 /tmp/tmp.5Joze4gS6z +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.c6lR6V3vfC +++ mktemp ++ local LAST_ERR=/tmp/tmp.mi2heMAoh2 ++ local exit_status=0 +++ 
seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.c6lR6V3vfC ++ cat /tmp/tmp.mi2heMAoh2 ++ rm /tmp/tmp.c6lR6V3vfC /tmp/tmp.mi2heMAoh2 ++ return 0 + [[ 2 == \2 ]] + echo + desc 'check proxysql custom config' + set +o xtrace ----------------------------------------------------------------------------------- check proxysql custom config ----------------------------------------------------------------------------------- + compare_proxysql_cfg some-name-proxysql-0 proxysql-cfg + local pod=some-name-proxysql-0 + local compare_file=proxysql-cfg + local 'query=SELECT variable_value from global_variables WHERE variable_name='\''mysql-poll_timeout'\''' + compare_mysql_cmd_local proxysql-cfg 'SELECT variable_value from global_variables WHERE variable_name='\''mysql-poll_timeout'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + local command_id=proxysql-cfg + local 'command=SELECT variable_value from global_variables WHERE variable_name='\''mysql-poll_timeout'\''' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/proxysql-cfg.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/proxysql-cfg-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT variable_value from global_variables WHERE variable_name='\''mysql-poll_timeout'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 '' + local 'command=SELECT variable_value from global_variables WHERE variable_name='\''mysql-poll_timeout'\''' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local container_name= + set +o xtrace + '[' '!' 
-s /tmp/tmp.zoZKt1BYg2/proxysql-cfg.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/proxysql-cfg.sql /tmp/tmp.zoZKt1BYg2/proxysql-cfg.sql + desc 'check if service and statefulset created with expected config' + set +o xtrace ----------------------------------------------------------------------------------- check if service and statefulset created with expected config ----------------------------------------------------------------------------------- + compare_kubectl statefulset/some-name-pxc + local resource=statefulset/some-name-pxc + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc.yml + local new_result=/tmp/tmp.zoZKt1BYg2/statefulset_some-name-pxc.yml + desc 'compare statefulset/some-name-pxc-' + set +o xtrace ----------------------------------------------------------------------------------- compare statefulset/some-name-pxc- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 ++ echo '1.32 >= 1.33' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 ++ echo '1.32 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-k129.yml ']' + version_gt 1.27 ++ echo '1.32 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-k127.yml ']' + version_gt 1.24 ++ echo '1.32 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-k124.yml ']' + version_gt 1.22 ++ echo '1.32 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-k122.yml ']' + version_gt 1.21 ++ echo '1.32 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-oc.yml ']' + version_gt 1.29 ++ echo '1.32 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-aks.yml ']' + kubectl_bin get -o yaml statefulset/some-name-pxc ++ mktemp + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. 
| select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.spec.template.spec.containers[].env[] | select(.name == "XTRABACKUP_ENABLED")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.metadata.annotations."kubernetes.digitalocean.com/load-balancer-id") | del(.metadata.annotations."service.beta.kubernetes.io/do-loadbalancer-type") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. 
| select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("proxysql-scheduler-20685", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + local LAST_OUT=/tmp/tmp.qfR5kUbB9F ++ mktemp + local LAST_ERR=/tmp/tmp.NhC5GoXngY + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml statefulset/some-name-pxc + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.qfR5kUbB9F + cat /tmp/tmp.NhC5GoXngY + rm /tmp/tmp.qfR5kUbB9F /tmp/tmp.NhC5GoXngY + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc.yml /tmp/tmp.zoZKt1BYg2/statefulset_some-name-pxc.yml + log 'compare_kubectl: statefulset/some-name-pxc OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T12:37:29+0000]' compare_kubectl: statefulset/some-name-pxc OK [2026-03-16T12:37:29+0000] compare_kubectl: statefulset/some-name-pxc OK + compare_kubectl statefulset/some-name-proxysql + local resource=statefulset/some-name-proxysql + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql.yml + local new_result=/tmp/tmp.zoZKt1BYg2/statefulset_some-name-proxysql.yml + desc 'compare statefulset/some-name-proxysql-' + set +o xtrace ----------------------------------------------------------------------------------- compare statefulset/some-name-proxysql- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 ++ echo '1.32 >= 1.33' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 ++ echo '1.32 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-k129.yml ']' + version_gt 1.27 ++ echo '1.32 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-k127.yml ']' + version_gt 1.24 ++ echo '1.32 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-k124.yml ']' + version_gt 1.22 ++ echo '1.32 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-k122.yml ']' + version_gt 1.21 ++ echo '1.32 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f 
/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-oc.yml ']' + version_gt 1.29 ++ echo '1.32 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-aks.yml ']' + kubectl_bin get -o yaml statefulset/some-name-proxysql + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.spec.template.spec.containers[].env[] | select(.name == "XTRABACKUP_ENABLED")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.metadata.annotations."kubernetes.digitalocean.com/load-balancer-id") | del(.metadata.annotations."service.beta.kubernetes.io/do-loadbalancer-type") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. 
| select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("proxysql-scheduler-20685", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - ++ mktemp + local LAST_OUT=/tmp/tmp.1jSoJc81c5 ++ mktemp + local LAST_ERR=/tmp/tmp.ZFCrKxukbd + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml statefulset/some-name-proxysql + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1jSoJc81c5 + cat /tmp/tmp.ZFCrKxukbd + rm /tmp/tmp.1jSoJc81c5 /tmp/tmp.ZFCrKxukbd + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql.yml /tmp/tmp.zoZKt1BYg2/statefulset_some-name-proxysql.yml + log 'compare_kubectl: statefulset/some-name-proxysql OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T12:37:30+0000]' compare_kubectl: statefulset/some-name-proxysql OK [2026-03-16T12:37:30+0000] compare_kubectl: statefulset/some-name-proxysql OK + compare_kubectl service/some-name-pxc + local resource=service/some-name-pxc + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc.yml + local new_result=/tmp/tmp.zoZKt1BYg2/service_some-name-pxc.yml + desc 'compare service/some-name-pxc-' + set +o xtrace ----------------------------------------------------------------------------------- compare service/some-name-pxc- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 ++ bc -l ++ echo '1.32 >= 1.33' + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 ++ bc -l ++ echo '1.32 >= 1.29' + '[' 1 -eq 1 ']' + return 0 + '[' -f 
/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-k129.yml ']' + version_gt 1.27 ++ echo '1.32 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-k127.yml ']' + version_gt 1.24 ++ echo '1.32 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-k124.yml ']' + version_gt 1.22 ++ bc -l ++ echo '1.32 >= 1.22' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-k122.yml ']' + version_gt 1.21 ++ echo '1.32 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-oc.yml ']' + version_gt 1.29 ++ echo '1.32 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-aks.yml ']' + kubectl_bin get -o yaml service/some-name-pxc + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.spec.template.spec.containers[].env[] | select(.name == "XTRABACKUP_ENABLED")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.metadata.annotations."kubernetes.digitalocean.com/load-balancer-id") | del(.metadata.annotations."service.beta.kubernetes.io/do-loadbalancer-type") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. 
| select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("proxysql-scheduler-20685", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. 
| select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - ++ mktemp + local LAST_OUT=/tmp/tmp.B2mMRWcAA8 ++ mktemp + local LAST_ERR=/tmp/tmp.c9AWHXMLbw + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml service/some-name-pxc + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.B2mMRWcAA8 + cat /tmp/tmp.c9AWHXMLbw + rm /tmp/tmp.B2mMRWcAA8 /tmp/tmp.c9AWHXMLbw + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc.yml /tmp/tmp.zoZKt1BYg2/service_some-name-pxc.yml + log 'compare_kubectl: service/some-name-pxc OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T12:37:31+0000]' compare_kubectl: service/some-name-pxc OK [2026-03-16T12:37:31+0000] compare_kubectl: service/some-name-pxc OK + compare_kubectl service/some-name-proxysql + local resource=service/some-name-proxysql + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql.yml + local new_result=/tmp/tmp.zoZKt1BYg2/service_some-name-proxysql.yml + desc 'compare service/some-name-proxysql-' + set +o xtrace ----------------------------------------------------------------------------------- compare service/some-name-proxysql- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 ++ bc -l ++ echo '1.32 >= 1.33' + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 ++ echo '1.32 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-k129.yml ']' + version_gt 1.27 ++ echo '1.32 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-k127.yml ']' + version_gt 1.24 ++ echo '1.32 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-k124.yml ']' + version_gt 1.22 ++ echo '1.32 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-k122.yml ']' + version_gt 1.21 ++ echo '1.32 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-oc.yml ']' + version_gt 1.29 ++ echo '1.32 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' 
-z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-aks.yml ']' + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.spec.template.spec.containers[].env[] | select(.name == "XTRABACKUP_ENABLED")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.metadata.annotations."kubernetes.digitalocean.com/load-balancer-id") | del(.metadata.annotations."service.beta.kubernetes.io/do-loadbalancer-type") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. 
== "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("proxysql-scheduler-20685", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + kubectl_bin get -o yaml service/some-name-proxysql ++ mktemp + local LAST_OUT=/tmp/tmp.w1ejftP6Gh ++ mktemp + local LAST_ERR=/tmp/tmp.L9xQ1Hao63 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml service/some-name-proxysql + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.w1ejftP6Gh + cat /tmp/tmp.L9xQ1Hao63 + rm /tmp/tmp.w1ejftP6Gh /tmp/tmp.L9xQ1Hao63 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql.yml /tmp/tmp.zoZKt1BYg2/service_some-name-proxysql.yml + log 'compare_kubectl: service/some-name-proxysql OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T12:37:32+0000]' compare_kubectl: service/some-name-proxysql OK [2026-03-16T12:37:32+0000] compare_kubectl: service/some-name-proxysql OK + compare_kubectl service/some-name-proxysql-unready + local resource=service/some-name-proxysql-unready + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready.yml + local new_result=/tmp/tmp.zoZKt1BYg2/service_some-name-proxysql-unready.yml + desc 'compare service/some-name-proxysql-unready-' + set +o xtrace ----------------------------------------------------------------------------------- compare service/some-name-proxysql-unready- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 ++ echo '1.32 >= 1.33' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 ++ echo '1.32 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-k129.yml ']' + version_gt 1.27 ++ echo '1.32 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-k127.yml ']' + version_gt 1.24 ++ echo '1.32 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' 
+ return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-k124.yml ']' + version_gt 1.22 ++ echo '1.32 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-k122.yml ']' + version_gt 1.21 ++ echo '1.32 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-oc.yml ']' + version_gt 1.29 ++ echo '1.32 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-aks.yml ']' + kubectl_bin get -o yaml service/some-name-proxysql-unready ++ mktemp + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.spec.template.spec.containers[].env[] | select(.name == "XTRABACKUP_ENABLED")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.metadata.annotations."kubernetes.digitalocean.com/load-balancer-id") | del(.metadata.annotations."service.beta.kubernetes.io/do-loadbalancer-type") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. 
| select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("proxysql-scheduler-20685", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. 
| select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + local LAST_OUT=/tmp/tmp.CwdVR2jtci ++ mktemp + local LAST_ERR=/tmp/tmp.j7j4hGEN4E + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml service/some-name-proxysql-unready + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.CwdVR2jtci + cat /tmp/tmp.j7j4hGEN4E + rm /tmp/tmp.CwdVR2jtci /tmp/tmp.j7j4hGEN4E + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready.yml /tmp/tmp.zoZKt1BYg2/service_some-name-proxysql-unready.yml + log 'compare_kubectl: service/some-name-proxysql-unready OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T12:37:33+0000]' compare_kubectl: service/some-name-proxysql-unready OK [2026-03-16T12:37:33+0000] compare_kubectl: service/some-name-proxysql-unready OK + sleep 120 + desc 'check if scheduler is enabled in all ProxySQL servers' + set +o xtrace ----------------------------------------------------------------------------------- check if scheduler is enabled in all ProxySQL servers ----------------------------------------------------------------------------------- + compare_scheduler some-name-proxysql-0 scheduler-0 + local pod=some-name-proxysql-0 + local compare_file=scheduler-0 + compare_mysql_cmd_local scheduler-0 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + local command_id=scheduler-0 + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/scheduler-0.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/scheduler-0-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 '' + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local container_name= + set +o xtrace + '[' '!' 
-s /tmp/tmp.zoZKt1BYg2/scheduler-0.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/scheduler-0.sql /tmp/tmp.zoZKt1BYg2/scheduler-0.sql + log 'scheduler is enabled in some-name-proxysql-0: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T12:39:35+0000]' scheduler is enabled in some-name-proxysql-0: OK [2026-03-16T12:39:35+0000] scheduler is enabled in some-name-proxysql-0: OK + compare_scheduler some-name-proxysql-1 scheduler-1 + local pod=some-name-proxysql-1 + local compare_file=scheduler-1 + compare_mysql_cmd_local scheduler-1 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + local command_id=scheduler-1 + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/scheduler-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/scheduler-1-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 '' + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local container_name= + set +o xtrace + '[' '!' -s /tmp/tmp.zoZKt1BYg2/scheduler-1.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/scheduler-1.sql /tmp/tmp.zoZKt1BYg2/scheduler-1.sql + log 'scheduler is enabled in some-name-proxysql-1: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T12:39:37+0000]' scheduler is enabled in some-name-proxysql-1: OK [2026-03-16T12:39:37+0000] scheduler is enabled in some-name-proxysql-1: OK + desc 'check if scheduler is doing its job in all ProxySQL servers' + set +o xtrace ----------------------------------------------------------------------------------- check if scheduler is doing its job in all ProxySQL servers ----------------------------------------------------------------------------------- + compare_mysql_servers some-name-proxysql-0 mysql-servers-0 + local pod=some-name-proxysql-0 + local compare_file=mysql-servers-0 + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-0' check runtime_mysql_servers in some-name-proxysql-0+ compare_mysql_cmd_local mysql-servers-0 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + echo + log 'mysql_servers are configured in some-name-proxysql-0: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T12:39:38+0000]' mysql_servers are configured in some-name-proxysql-0: OK [2026-03-16T12:39:38+0000] mysql_servers are configured in some-name-proxysql-0: OK + compare_mysql_servers some-name-proxysql-1 mysql-servers-1 + local pod=some-name-proxysql-1 + local compare_file=mysql-servers-1 + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 
16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-1' check runtime_mysql_servers in some-name-proxysql-1+ compare_mysql_cmd_local mysql-servers-1 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + echo + log 'mysql_servers are configured in some-name-proxysql-1: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T12:39:40+0000]' mysql_servers are configured in some-name-proxysql-1: OK [2026-03-16T12:39:40+0000] mysql_servers are configured in some-name-proxysql-1: OK + desc 'check disabling scheduler' + set +o xtrace ----------------------------------------------------------------------------------- check disabling scheduler ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge -p '{"spec": {"proxysql": {"scheduler": {"enabled": false}}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.SVq75WvJTX ++ mktemp + local LAST_ERR=/tmp/tmp.jBhW4xUm4J + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge -p '{"spec": {"proxysql": {"scheduler": {"enabled": false}}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.SVq75WvJTX perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.jBhW4xUm4J + rm /tmp/tmp.SVq75WvJTX /tmp/tmp.jBhW4xUm4J + return 0 + sleep_with_log 90 + local d=90 + log 'sleeping for 90 seconds' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T12:39:42+0000]' sleeping for 90 seconds [2026-03-16T12:39:42+0000] sleeping for 90 seconds + sleep 90 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IfuTmE4eEF +++ mktemp ++ local LAST_ERR=/tmp/tmp.otvCCaa1ud ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IfuTmE4eEF ++ cat /tmp/tmp.otvCCaa1ud ++ rm /tmp/tmp.IfuTmE4eEF /tmp/tmp.otvCCaa1ud ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AYBRA9T2ml +++ mktemp ++ local LAST_ERR=/tmp/tmp.748MNJoc7k ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AYBRA9T2ml ++ cat /tmp/tmp.748MNJoc7k ++ rm /tmp/tmp.AYBRA9T2ml /tmp/tmp.748MNJoc7k ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.sujAAWJaMZ ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.FQo9fMfMHi +++++ local 
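# NOTE: after patching spec.proxysql.scheduler.enabled=false the test waits a
# fixed interval for the operator to reconcile; sleep_with_log, per the trace,
# reduces to:
sleep_with_log() {
    local d=$1
    log "sleeping for ${d} seconds"
    sleep "${d}"
}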
exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.sujAAWJaMZ +++++ cat /tmp/tmp.FQo9fMfMHi +++++ rm /tmp/tmp.sujAAWJaMZ /tmp/tmp.FQo9fMfMHi +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Oag7yv4xv2 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.W5OjVhrsvt +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Oag7yv4xv2 +++++ cat /tmp/tmp.W5OjVhrsvt +++++ rm /tmp/tmp.Oag7yv4xv2 /tmp/tmp.W5OjVhrsvt +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5E0Ou8MM2r +++ mktemp ++ local LAST_ERR=/tmp/tmp.PrsU9ssWuW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5E0Ou8MM2r ++ cat /tmp/tmp.PrsU9ssWuW ++ rm /tmp/tmp.5E0Ou8MM2r /tmp/tmp.PrsU9ssWuW ++ return 0 + [[ 2 == \2 ]] + echo + compare_scheduler some-name-proxysql-0 scheduler-0-disabled + local pod=some-name-proxysql-0 + local compare_file=scheduler-0-disabled + compare_mysql_cmd_local scheduler-0-disabled 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + local command_id=scheduler-0-disabled + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/scheduler-0-disabled.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/scheduler-0-disabled-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 '' + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local container_name= + set +o xtrace + '[' '!' 
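# NOTE: the nested +++++ trace above is wait_cluster_consistency working out
# which proxy is in play: it probes .spec.haproxy.enabled first, then
# .spec.proxysql.enabled, and then checks the matching .status.*.ready count.
# A simplified sketch of that branch:
get_proxy_engine_sketch() {
    local cluster=$1
    if [ "$(kubectl get pxc "${cluster}" -o 'jsonpath={.spec.haproxy.enabled}')" = "true" ]; then
        echo haproxy
    elif [ "$(kubectl get pxc "${cluster}" -o 'jsonpath={.spec.proxysql.enabled}')" = "true" ]; then
        echo proxysql
    fi
}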
-s /tmp/tmp.zoZKt1BYg2/scheduler-0-disabled.sql ']' + sleep 20 + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local container_name= + set +o xtrace + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/scheduler-0-disabled.sql /tmp/tmp.zoZKt1BYg2/scheduler-0-disabled.sql + log 'scheduler is disabled in some-name-proxysql-0: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T12:41:50+0000]' scheduler is disabled in some-name-proxysql-0: OK [2026-03-16T12:41:50+0000] scheduler is disabled in some-name-proxysql-0: OK + compare_scheduler some-name-proxysql-1 scheduler-1-disabled + local pod=some-name-proxysql-1 + local compare_file=scheduler-1-disabled + compare_mysql_cmd_local scheduler-1-disabled 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + local command_id=scheduler-1-disabled + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/scheduler-1-disabled.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/scheduler-1-disabled-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 '' + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local container_name= + set +o xtrace + '[' '!' 
-s /tmp/tmp.zoZKt1BYg2/scheduler-1-disabled.sql ']' + sleep 20 + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local container_name= + set +o xtrace + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/scheduler-1-disabled.sql /tmp/tmp.zoZKt1BYg2/scheduler-1-disabled.sql + log 'scheduler is disabled in some-name-proxysql-1: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T12:42:14+0000]' scheduler is disabled in some-name-proxysql-1: OK [2026-03-16T12:42:14+0000] scheduler is disabled in some-name-proxysql-1: OK + kubectl get pod
NAME                          READY   STATUS    RESTARTS       AGE
pxc-client-67fc4995bb-zdm84   2/2     Running   0              10m
some-name-proxysql-0          3/3     Running   1 (101s ago)   2m20s
some-name-proxysql-1          3/3     Running   0              2m32s
some-name-pxc-0               1/1     Running   0              10m
some-name-pxc-1               1/1     Running   0              9m1s
some-name-pxc-2               1/1     Running   0              7m40s
+ kubectl get pxc
NAME        ENDPOINT                                      STATUS   PXC   PROXYSQL   HAPROXY   AGE
some-name   some-name-proxysql.proxysql-scheduler-20685   ready    3     2                    10m
+ desc 'check enabling scheduler' + set +o xtrace ----------------------------------------------------------------------------------- check enabling scheduler ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge -p '{"spec": {"proxysql": {"scheduler": {"enabled": true}}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.2vKVceqjHK ++ mktemp + local LAST_ERR=/tmp/tmp.FGsMcyPXPF + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge -p '{"spec": {"proxysql": {"scheduler": {"enabled": true}}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.2vKVceqjHK perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.FGsMcyPXPF + rm /tmp/tmp.2vKVceqjHK /tmp/tmp.FGsMcyPXPF + return 0 + sleep_with_log 90 + local d=90 + log 'sleeping for 90 seconds' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T12:42:18+0000]' sleeping for 90 seconds [2026-03-16T12:42:18+0000] sleeping for 90 seconds + sleep 90 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.o8Gxya1tRp +++ mktemp ++ local LAST_ERR=/tmp/tmp.eusTU4w5ml ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.o8Gxya1tRp ++ cat /tmp/tmp.eusTU4w5ml ++ rm /tmp/tmp.o8Gxya1tRp /tmp/tmp.eusTU4w5ml ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qTzFS4fA6g +++ mktemp ++ local LAST_ERR=/tmp/tmp.x6hV800rPC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 
'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qTzFS4fA6g ++ cat /tmp/tmp.x6hV800rPC ++ rm /tmp/tmp.qTzFS4fA6g /tmp/tmp.x6hV800rPC ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Mjw4Ya3PLe ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.uN9tX4k6Cs +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Mjw4Ya3PLe +++++ cat /tmp/tmp.uN9tX4k6Cs +++++ rm /tmp/tmp.Mjw4Ya3PLe /tmp/tmp.uN9tX4k6Cs +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.8fzdrjstul ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.RStt2RVmpp +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.8fzdrjstul +++++ cat /tmp/tmp.RStt2RVmpp +++++ rm /tmp/tmp.8fzdrjstul /tmp/tmp.RStt2RVmpp +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fxUodoK94p +++ mktemp ++ local LAST_ERR=/tmp/tmp.WH8juTp5gE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fxUodoK94p ++ cat /tmp/tmp.WH8juTp5gE ++ rm /tmp/tmp.fxUodoK94p /tmp/tmp.WH8juTp5gE ++ return 0 + [[ 2 == \2 ]] + echo + compare_scheduler some-name-proxysql-0 scheduler-0 + local pod=some-name-proxysql-0 + local compare_file=scheduler-0 + compare_mysql_cmd_local scheduler-0 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + local command_id=scheduler-0 + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/scheduler-0.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/scheduler-0-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 '' + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local container_name= + set +o xtrace + '[' '!' 
-s /tmp/tmp.zoZKt1BYg2/scheduler-0.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/scheduler-0.sql /tmp/tmp.zoZKt1BYg2/scheduler-0.sql + log 'scheduler is enabled in some-name-proxysql-0: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T12:44:04+0000]' scheduler is enabled in some-name-proxysql-0: OK [2026-03-16T12:44:04+0000] scheduler is enabled in some-name-proxysql-0: OK + compare_scheduler some-name-proxysql-1 scheduler-1 + local pod=some-name-proxysql-1 + local compare_file=scheduler-1 + compare_mysql_cmd_local scheduler-1 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + local command_id=scheduler-1 + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/scheduler-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/scheduler-1-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 '' + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local container_name= + set +o xtrace + '[' '!' -s /tmp/tmp.zoZKt1BYg2/scheduler-1.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/scheduler-1.sql /tmp/tmp.zoZKt1BYg2/scheduler-1.sql + log 'scheduler is enabled in some-name-proxysql-1: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T12:44:05+0000]' scheduler is enabled in some-name-proxysql-1: OK [2026-03-16T12:44:05+0000] scheduler is enabled in some-name-proxysql-1: OK + desc 'check PXC pod 1 is promoted to writer when pod-0 is down' + set +o xtrace ----------------------------------------------------------------------------------- check PXC pod 1 is promoted to writer when pod-0 is down ----------------------------------------------------------------------------------- + LOOP_PID=10340 + echo LOOP_PID=10340 LOOP_PID=10340 + sleep 10 + for i in '{1..20}' + kubectl delete pod some-name-pxc-0 pod "some-name-pxc-0" deleted from proxysql-scheduler-20685 namespace + log 'waiting for pod0 to be removed from proxysql' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T12:44:15+0000]' waiting for pod0 to be removed from proxysql [2026-03-16T12:44:15+0000] waiting for pod0 to be removed from proxysql + compare_mysql_servers some-name-proxysql-0 mysql-servers-0-pod0-down 10340 + local pod=some-name-proxysql-0 + local compare_file=mysql-servers-0-pod0-down + local loop_pid=10340 + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-0' check runtime_mysql_servers in some-name-proxysql-0+ compare_mysql_cmd_local mysql-servers-0-pod0-down 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + echo + log 'mysql_servers 
are configured in some-name-proxysql-0 when pxc-0 is down: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T12:44:17+0000]' mysql_servers are configured in some-name-proxysql-0 when pxc-0 is down: OK [2026-03-16T12:44:17+0000] mysql_servers are configured in some-name-proxysql-0 when pxc-0 is down: OK + compare_mysql_servers some-name-proxysql-1 mysql-servers-1-pod0-down 10340 + local pod=some-name-proxysql-1 + local compare_file=mysql-servers-1-pod0-down + local loop_pid=10340 + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-1' check runtime_mysql_servers in some-name-proxysql-1+ compare_mysql_cmd_local mysql-servers-1-pod0-down 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + echo + log 'mysql_servers are configured in some-name-proxysql-1 when pxc-0 is down: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T12:44:19+0000]' mysql_servers are configured in some-name-proxysql-1 when pxc-0 is down: OK [2026-03-16T12:44:19+0000] mysql_servers are configured in some-name-proxysql-1 when pxc-0 is down: OK + kill 10340 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/../functions: line 152: 10340 Terminated ( for i in {1..20}; do kubectl delete pod "${cluster}-pxc-0"; sleep 3; done ) + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vfDz5tvPOi +++ mktemp ++ local LAST_ERR=/tmp/tmp.xvgSScd4pI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vfDz5tvPOi ++ cat /tmp/tmp.xvgSScd4pI ++ rm /tmp/tmp.vfDz5tvPOi /tmp/tmp.xvgSScd4pI ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yB1krvLlEG +++ mktemp ++ local LAST_ERR=/tmp/tmp.mcr81yiof2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yB1krvLlEG ++ cat /tmp/tmp.mcr81yiof2 ++ rm /tmp/tmp.yB1krvLlEG /tmp/tmp.mcr81yiof2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
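# NOTE: the writer-promotion check keeps pxc-0 down with a background delete
# loop (its source bash is echoed in the "Terminated" line above, ${cluster}
# being some-name in this run); the runtime_mysql_servers assertions run while
# it loops, and it is killed once they pass:
( for i in {1..20}; do kubectl delete pod "${cluster}-pxc-0"; sleep 3; done ) &
LOOP_PID=$!
# ... runtime_mysql_servers assertions for both proxysql pods ...
kill "${LOOP_PID}"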
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.g7FRDc9EpH +++ mktemp ++ local LAST_ERR=/tmp/tmp.HG8I4eBJ1Y ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.g7FRDc9EpH ++ cat /tmp/tmp.HG8I4eBJ1Y ++ rm /tmp/tmp.g7FRDc9EpH /tmp/tmp.HG8I4eBJ1Y ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mli60ISliM +++ mktemp ++ local LAST_ERR=/tmp/tmp.22OvylAtyh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mli60ISliM ++ cat /tmp/tmp.22OvylAtyh ++ rm /tmp/tmp.mli60ISliM /tmp/tmp.22OvylAtyh ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KBjlkpi1nP +++ mktemp ++ local LAST_ERR=/tmp/tmp.nDnpN8vr1N ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KBjlkpi1nP ++ cat /tmp/tmp.nDnpN8vr1N ++ rm /tmp/tmp.KBjlkpi1nP /tmp/tmp.nDnpN8vr1N ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9I6SBA6Sk1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.wnuW39EhaC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9I6SBA6Sk1 ++ cat /tmp/tmp.wnuW39EhaC ++ rm /tmp/tmp.9I6SBA6Sk1 /tmp/tmp.wnuW39EhaC ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uHkncEMfBQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.QgGNujIZpl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uHkncEMfBQ ++ cat /tmp/tmp.QgGNujIZpl ++ rm /tmp/tmp.uHkncEMfBQ /tmp/tmp.QgGNujIZpl ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 6 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ScG8UOW0Ky +++ mktemp ++ local LAST_ERR=/tmp/tmp.TlACnHTgw3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ScG8UOW0Ky ++ cat /tmp/tmp.TlACnHTgw3 ++ rm /tmp/tmp.ScG8UOW0Ky /tmp/tmp.TlACnHTgw3 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jnh3ZoezwU +++ mktemp ++ local LAST_ERR=/tmp/tmp.4pC569RWzJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jnh3ZoezwU ++ cat /tmp/tmp.4pC569RWzJ ++ rm /tmp/tmp.jnh3ZoezwU /tmp/tmp.4pC569RWzJ ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.95ADP5Bmud ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.dLinjf2OKc +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.95ADP5Bmud +++++ cat /tmp/tmp.dLinjf2OKc +++++ rm /tmp/tmp.95ADP5Bmud /tmp/tmp.dLinjf2OKc +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.5T8s1rfCz9 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.RVlFfmLn6b +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.5T8s1rfCz9 +++++ cat /tmp/tmp.RVlFfmLn6b +++++ rm /tmp/tmp.5T8s1rfCz9 /tmp/tmp.RVlFfmLn6b +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.n5GHE5J04Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.y7OhrIV2He ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.n5GHE5J04Y ++ cat /tmp/tmp.y7OhrIV2He ++ rm /tmp/tmp.n5GHE5J04Y /tmp/tmp.y7OhrIV2He ++ return 0 + [[ 2 == \2 ]] + echo + desc 'check scaling PXC up to 5 replicas' + set +o xtrace ----------------------------------------------------------------------------------- check scaling PXC up to 5 replicas ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge -p '{"spec": {"pxc": {"size": 5}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.RlofssJo4C ++ mktemp + local LAST_ERR=/tmp/tmp.hgoXETdsZd + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge -p '{"spec": {"pxc": {"size": 5}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.RlofssJo4C 
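# NOTE: the dotted wait above is wait_cluster_consistency polling
# .status.state every 5 seconds with i capped at max=300 before it verifies
# the pxc and proxysql ready counts. A condensed sketch of the polling core:
wait_state_ready_sketch() {
    local cluster=$1 i=0 max=300
    echo -n "waiting for pxc/${cluster} to be ready"
    until [ "$(kubectl get pxc "${cluster}" -o 'jsonpath={.status.state}')" = "ready" ]; do
        [ "$i" -ge "$max" ] && return 1
        echo -n .
        sleep 5
        i=$((i + 1))
    done
}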
perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.hgoXETdsZd + rm /tmp/tmp.RlofssJo4C /tmp/tmp.hgoXETdsZd + return 0 + wait_pod some-name-pxc-3 + local pod=some-name-pxc-3 + local max_retry=480 + local ns= ++ echo some-name-pxc-3 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-3 condition met waiting for pod/some-name-pxc-3 to become Ready.Ok + wait_pod some-name-pxc-4 + local pod=some-name-pxc-4 + local max_retry=480 + local ns= ++ grep -E '^(pxc|proxysql)$' ++ echo some-name-pxc-4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/some-name-pxc-4 condition met waiting for pod/some-name-pxc-4 to become Ready.Ok + sleep 120 + compare_mysql_servers some-name-proxysql-0 mysql-servers-0-1 + local pod=some-name-proxysql-0 + local compare_file=mysql-servers-0-1 + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-0' check runtime_mysql_servers in some-name-proxysql-0+ compare_mysql_cmd_local mysql-servers-0-1 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + echo + log 'new mysql_servers are configured in some-name-proxysql-0: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T12:50:04+0000]' new mysql_servers are configured in some-name-proxysql-0: OK [2026-03-16T12:50:04+0000] new mysql_servers are configured in some-name-proxysql-0: OK + compare_mysql_servers some-name-proxysql-1 mysql-servers-1-1 + local pod=some-name-proxysql-1 + local compare_file=mysql-servers-1-1 + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-1' check runtime_mysql_servers in some-name-proxysql-1+ compare_mysql_cmd_local mysql-servers-1-1 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + echo + log 'new mysql_servers are configured in some-name-proxysql-1: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T12:50:06+0000]' new mysql_servers are configured in some-name-proxysql-1: OK [2026-03-16T12:50:06+0000] new mysql_servers are configured in some-name-proxysql-1: OK + desc 'check scaling ProxySQL up' + set +o xtrace ----------------------------------------------------------------------------------- check scaling ProxySQL up ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge -p '{"spec": {"proxysql": {"size": 3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.5gGvPs8k55 ++ mktemp + local LAST_ERR=/tmp/tmp.vF65GemyvL + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge -p '{"spec": {"proxysql": {"size": 3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.5gGvPs8k55 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.vF65GemyvL + rm /tmp/tmp.5gGvPs8k55 /tmp/tmp.vF65GemyvL + return 0 + wait_pod some-name-proxysql-2 + local pod=some-name-proxysql-2 + local max_retry=480 + local ns= 
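
compare_mysql_servers checks the scheduler's work by querying ProxySQL's admin interface (port 6032) inside each proxysql pod and diffing the output against a per-pod expected file. Run by hand, the traced query would look roughly like this (pod, user, and password are the test defaults visible above; the -NBe flags are an assumption about how run_mysql_local invokes the client):

    # Read ProxySQL's runtime view of the PXC topology from pod 0.
    kubectl exec some-name-proxysql-0 -c proxysql -- \
        mysql -h127.0.0.1 -P6032 -uproxyadmin -padmin_password -NBe \
        "SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status
         FROM runtime_mysql_servers WHERE status='ONLINE'"

Querying runtime_mysql_servers (rather than mysql_servers) matters: it reflects the configuration actually loaded into the ProxySQL runtime, not just what sits in the admin tables.
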
++ grep -E '^(pxc|proxysql)$' ++ echo some-name-proxysql-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=proxysql + set +o xtrace pod/some-name-proxysql-2 condition met waiting for pod/some-name-proxysql-2 to become Ready.Ok + sleep 120 + compare_scheduler some-name-proxysql-2 scheduler-2 + local pod=some-name-proxysql-2 + local compare_file=scheduler-2 + compare_mysql_cmd_local scheduler-2 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-2 + local command_id=scheduler-2 + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-2 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/scheduler-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/scheduler-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-2 '' + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-2 + local container_name= + set +o xtrace + '[' '!' -s /tmp/tmp.zoZKt1BYg2/scheduler-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2399/e2e-tests/proxysql-scheduler/compare/scheduler-2.sql /tmp/tmp.zoZKt1BYg2/scheduler-2.sql + log 'scheduler is enabled in some-name-proxysql-2: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T12:52:44+0000]' scheduler is enabled in some-name-proxysql-2: OK [2026-03-16T12:52:44+0000] scheduler is enabled in some-name-proxysql-2: OK + compare_mysql_servers some-name-proxysql-2 mysql-servers-2 + local pod=some-name-proxysql-2 + local compare_file=mysql-servers-2 + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-2' check runtime_mysql_servers in some-name-proxysql-2+ compare_mysql_cmd_local mysql-servers-2 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-2 + echo + log 'mysql_servers are configured in some-name-proxysql-2: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T12:52:45+0000]' mysql_servers are configured in some-name-proxysql-2: OK [2026-03-16T12:52:45+0000] mysql_servers are configured in some-name-proxysql-2: OK + desc 'check writerIsAlsoReader = false' + set +o xtrace ----------------------------------------------------------------------------------- check writerIsAlsoReader = false ----------------------------------------------------------------------------------- + kubectl patch pxc some-name --type=merge -p '{"spec": {"proxysql": {"scheduler": {"writerIsAlsoReader": false}}}}' perconaxtradbcluster.pxc.percona.com/some-name patched + sleep 10 + wait_cluster_consistency some-name 5 3 + local cluster_name=some-name + local cluster_size=5 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster 
consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NvDjJKyJIS +++ mktemp ++ local LAST_ERR=/tmp/tmp.5QFu36EulB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NvDjJKyJIS ++ cat /tmp/tmp.5QFu36EulB ++ rm /tmp/tmp.NvDjJKyJIS /tmp/tmp.5QFu36EulB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IXEd1uZ17L +++ mktemp ++ local LAST_ERR=/tmp/tmp.D746T7QaUO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IXEd1uZ17L ++ cat /tmp/tmp.D746T7QaUO ++ rm /tmp/tmp.IXEd1uZ17L /tmp/tmp.D746T7QaUO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.k5JfbBKdTn +++ mktemp ++ local LAST_ERR=/tmp/tmp.V5ekK794fc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.k5JfbBKdTn ++ cat /tmp/tmp.V5ekK794fc ++ rm /tmp/tmp.k5JfbBKdTn /tmp/tmp.V5ekK794fc ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JwSwifvTz9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ewOyLtwwP9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JwSwifvTz9 ++ cat /tmp/tmp.ewOyLtwwP9 ++ rm /tmp/tmp.JwSwifvTz9 /tmp/tmp.ewOyLtwwP9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Je5zSxcG8t +++ mktemp ++ local LAST_ERR=/tmp/tmp.pVD8k8JKAP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Je5zSxcG8t ++ cat /tmp/tmp.pVD8k8JKAP ++ rm /tmp/tmp.Je5zSxcG8t /tmp/tmp.pVD8k8JKAP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kkPpLfonLW +++ mktemp ++ local LAST_ERR=/tmp/tmp.jqqralukzV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kkPpLfonLW ++ cat /tmp/tmp.jqqralukzV ++ rm /tmp/tmp.kkPpLfonLW /tmp/tmp.jqqralukzV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
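
The wait in progress here was kicked off by the writerIsAlsoReader toggle a few lines up: a one-line merge patch on spec.proxysql.scheduler, after which the scheduler should stop listing the writer node in the reader hostgroup (hence the *-writerNotReader compare files used below). The patch, exactly as traced:

    # Toggle traced above: the writer is no longer also exposed as a reader.
    kubectl patch pxc some-name --type=merge \
        -p '{"spec": {"proxysql": {"scheduler": {"writerIsAlsoReader": false}}}}'
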
.+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IsvFr8PaMx +++ mktemp ++ local LAST_ERR=/tmp/tmp.BvzmLGTvtJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IsvFr8PaMx ++ cat /tmp/tmp.BvzmLGTvtJ ++ rm /tmp/tmp.IsvFr8PaMx /tmp/tmp.BvzmLGTvtJ ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.05LOZbG3lJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.M8Z1t18h2Q ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.05LOZbG3lJ ++ cat /tmp/tmp.M8Z1t18h2Q ++ rm /tmp/tmp.05LOZbG3lJ /tmp/tmp.M8Z1t18h2Q ++ return 0 + [[ 5 == \5 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.mHH5ML1sNS ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.WcUAyHTekc +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.mHH5ML1sNS +++++ cat /tmp/tmp.WcUAyHTekc +++++ rm /tmp/tmp.mHH5ML1sNS /tmp/tmp.WcUAyHTekc +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.g6Ap6HFXHS ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.xIQP9EySO6 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.g6Ap6HFXHS +++++ cat /tmp/tmp.xIQP9EySO6 +++++ rm /tmp/tmp.g6Ap6HFXHS /tmp/tmp.xIQP9EySO6 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BjCip9h6Mc +++ mktemp ++ local LAST_ERR=/tmp/tmp.3ikYJpqB7J ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BjCip9h6Mc ++ cat /tmp/tmp.3ikYJpqB7J ++ rm /tmp/tmp.BjCip9h6Mc /tmp/tmp.3ikYJpqB7J ++ return 0 + [[ 3 == \3 ]] + echo + sleep 60 + compare_mysql_servers some-name-proxysql-0 mysql-servers-0-1-writerNotReader + local pod=some-name-proxysql-0 + local compare_file=mysql-servers-0-1-writerNotReader + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-0' check runtime_mysql_servers in some-name-proxysql-0+ compare_mysql_cmd_local mysql-servers-0-1-writerNotReader 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 
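
All of these compare_* helpers bottom out in the same diff-based check, shown in full for scheduler-2 earlier: pick the expected file from e2e-tests/proxysql-scheduler/compare/, prefer a version-suffixed variant (e.g. -80.sql) when the image tag matches, capture the query output from the pod, and diff -u the two; an empty diff is a pass. A condensed sketch (COMPARE_DIR, TMPDIR, and IMAGE are placeholders, not the suite's real variable names):

    # Condensed compare flow; $uri carries the client flags, e.g.
    # '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password'.
    compare_local() {
        local name=$1 query=$2 uri=$3 pod=$4
        local expected=$COMPARE_DIR/$name.sql
        # prefer the 8.0-specific expected file when it exists
        [[ $IMAGE =~ 8\.0 && -f $COMPARE_DIR/$name-80.sql ]] \
            && expected=$COMPARE_DIR/$name-80.sql
        kubectl exec "$pod" -- mysql $uri -NBe "$query" >"$TMPDIR/$name.sql"
        diff -u "$expected" "$TMPDIR/$name.sql"  # empty diff => match
    }
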
+ echo + log 'new mysql_servers are configured in some-name-proxysql-0: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T12:54:51+0000]' new mysql_servers are configured in some-name-proxysql-0: OK [2026-03-16T12:54:51+0000] new mysql_servers are configured in some-name-proxysql-0: OK + compare_mysql_servers some-name-proxysql-1 mysql-servers-1-1-writerNotReader + local pod=some-name-proxysql-1 + local compare_file=mysql-servers-1-1-writerNotReader + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-1' check runtime_mysql_servers in some-name-proxysql-1+ compare_mysql_cmd_local mysql-servers-1-1-writerNotReader 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + echo + log 'new mysql_servers are configured in some-name-proxysql-1: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T12:54:53+0000]' new mysql_servers are configured in some-name-proxysql-1: OK [2026-03-16T12:54:53+0000] new mysql_servers are configured in some-name-proxysql-1: OK + compare_mysql_servers some-name-proxysql-2 mysql-servers-2-writerNotReader + local pod=some-name-proxysql-2 + local compare_file=mysql-servers-2-writerNotReader + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-2' check runtime_mysql_servers in some-name-proxysql-2+ compare_mysql_cmd_local mysql-servers-2-writerNotReader 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-2 + echo + log 'mysql_servers are configured in some-name-proxysql-2: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T12:54:54+0000]' mysql_servers are configured in some-name-proxysql-2: OK [2026-03-16T12:54:54+0000] mysql_servers are configured in some-name-proxysql-2: OK + desc 'check scaling PXC down to 3 replicas' + set +o xtrace ----------------------------------------------------------------------------------- check scaling PXC down to 3 replicas ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge -p '{"spec": {"pxc": {"size": 3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.VqaoL5hBhT ++ mktemp + local LAST_ERR=/tmp/tmp.yTsCHCYw1D + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge -p '{"spec": {"pxc": {"size": 3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.VqaoL5hBhT perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.yTsCHCYw1D + rm /tmp/tmp.VqaoL5hBhT /tmp/tmp.yTsCHCYw1D + return 0 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ 
kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HTWPwhVxu0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.qMGiLoIN4A ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HTWPwhVxu0 ++ cat /tmp/tmp.qMGiLoIN4A ++ rm /tmp/tmp.HTWPwhVxu0 /tmp/tmp.qMGiLoIN4A ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DJM3bipIoz +++ mktemp ++ local LAST_ERR=/tmp/tmp.d9jxdAwuZb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DJM3bipIoz ++ cat /tmp/tmp.d9jxdAwuZb ++ rm /tmp/tmp.DJM3bipIoz /tmp/tmp.d9jxdAwuZb ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OLfdVMk8iK +++ mktemp ++ local LAST_ERR=/tmp/tmp.JOLGHju2cq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OLfdVMk8iK ++ cat /tmp/tmp.JOLGHju2cq ++ rm /tmp/tmp.OLfdVMk8iK /tmp/tmp.JOLGHju2cq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5KOfSnu5dN +++ mktemp ++ local LAST_ERR=/tmp/tmp.1tVVEzKp9J ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5KOfSnu5dN ++ cat /tmp/tmp.1tVVEzKp9J ++ rm /tmp/tmp.5KOfSnu5dN /tmp/tmp.1tVVEzKp9J ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7NhtsWaYhc +++ mktemp ++ local LAST_ERR=/tmp/tmp.uP9sqEbdsg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7NhtsWaYhc ++ cat /tmp/tmp.uP9sqEbdsg ++ rm /tmp/tmp.7NhtsWaYhc /tmp/tmp.uP9sqEbdsg ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NrB5nKp1LG +++ mktemp ++ local LAST_ERR=/tmp/tmp.MXhNSlsq0V ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NrB5nKp1LG ++ cat /tmp/tmp.MXhNSlsq0V ++ rm /tmp/tmp.NrB5nKp1LG /tmp/tmp.MXhNSlsq0V ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.f6wYmWXoCP ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.kIKRiKXsvH +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.f6wYmWXoCP +++++ cat /tmp/tmp.kIKRiKXsvH +++++ rm /tmp/tmp.f6wYmWXoCP /tmp/tmp.kIKRiKXsvH +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.ISSDSo5kWc ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.hhcMl0WsEB +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.ISSDSo5kWc +++++ cat /tmp/tmp.hhcMl0WsEB +++++ rm /tmp/tmp.ISSDSo5kWc /tmp/tmp.hhcMl0WsEB +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CIZrGWHkmX +++ mktemp ++ local LAST_ERR=/tmp/tmp.wgw3hXKoQI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CIZrGWHkmX ++ cat /tmp/tmp.wgw3hXKoQI ++ rm /tmp/tmp.CIZrGWHkmX /tmp/tmp.wgw3hXKoQI ++ return 0 + [[ 3 == \3 ]] + echo + sleep 60 + compare_mysql_servers some-name-proxysql-0 mysql-servers-0-scaledown + local pod=some-name-proxysql-0 + local compare_file=mysql-servers-0-scaledown + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-0' check runtime_mysql_servers in some-name-proxysql-0+ compare_mysql_cmd_local mysql-servers-0-scaledown 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + echo + log 
'mysql_servers are configured in some-name-proxysql-0: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T12:56:32+0000]' mysql_servers are configured in some-name-proxysql-0: OK [2026-03-16T12:56:32+0000] mysql_servers are configured in some-name-proxysql-0: OK + compare_mysql_servers some-name-proxysql-1 mysql-servers-1-scaledown + local pod=some-name-proxysql-1 + local compare_file=mysql-servers-1-scaledown + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-1' check runtime_mysql_servers in some-name-proxysql-1+ compare_mysql_cmd_local mysql-servers-1-scaledown 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + echo + log 'mysql_servers are configured in some-name-proxysql-1: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T12:56:34+0000]' mysql_servers are configured in some-name-proxysql-1: OK [2026-03-16T12:56:34+0000] mysql_servers are configured in some-name-proxysql-1: OK + compare_mysql_servers some-name-proxysql-2 mysql-servers-2-scaledown + local pod=some-name-proxysql-2 + local compare_file=mysql-servers-2-scaledown + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-2' check runtime_mysql_servers in some-name-proxysql-2+ compare_mysql_cmd_local mysql-servers-2-scaledown 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-2 + echo + log 'mysql_servers are configured in some-name-proxysql-2: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T12:56:35+0000]' mysql_servers are configured in some-name-proxysql-2: OK [2026-03-16T12:56:35+0000] mysql_servers are configured in some-name-proxysql-2: OK + desc 'check PXC pod 0 is reader and writer when pods 1 and 2 are down' + set +o xtrace ----------------------------------------------------------------------------------- check PXC pod 0 is reader and writer when pods 1 and 2 are down ----------------------------------------------------------------------------------- + log 'scaling PXC down to 1 replica' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T12:56:35+0000]' scaling PXC down to 1 replica [2026-03-16T12:56:35+0000] scaling PXC down to 1 replica + kubectl scale sts/some-name-pxc --replicas=1 statefulset.apps/some-name-pxc scaled + sleep 20 + compare_mysql_servers some-name-proxysql-0 mysql-servers-0-two-pod-down + local pod=some-name-proxysql-0 + local compare_file=mysql-servers-0-two-pod-down + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-0' check runtime_mysql_servers in some-name-proxysql-0+ compare_mysql_cmd_local mysql-servers-0-two-pod-down 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + sleep 10 + echo -n . 
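
This scenario deliberately bypasses the CR: kubectl scale sts/some-name-pxc --replicas=1 removes pods 1 and 2 underneath the operator, and the test then re-runs the comparison until the scheduler has demoted the dead nodes and pod 0 serves both hostgroups. The retry wrapper around the dots printed here is the loop visible in the trace: 10-second sleeps, up to 30 attempts. As a sketch (reusing the hypothetical compare_local from the earlier sketch):

    # Retry sketch matching the traced loop: re-check every 10s,
    # give up after 30 attempts.
    retry=0
    until compare_local mysql-servers-0-two-pod-down "$QUERY" "$URI" some-name-proxysql-0; do
        sleep 10
        echo -n .
        let retry+=1
        [[ $retry -ge 30 ]] && { echo "topology never converged" >&2; exit 1; }  # illustrative
    done
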
.+ let retry+=1 + [[ 1 -ge 30 ]] + compare_mysql_cmd_local mysql-servers-0-two-pod-down 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + sleep 10 + echo -n . .+ let retry+=1 + [[ 2 -ge 30 ]] + compare_mysql_cmd_local mysql-servers-0-two-pod-down 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + sleep 10 + echo -n . .+ let retry+=1 + [[ 3 -ge 30 ]] + compare_mysql_cmd_local mysql-servers-0-two-pod-down 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + sleep 10 + echo -n . .+ let retry+=1 + [[ 4 -ge 30 ]] + compare_mysql_cmd_local mysql-servers-0-two-pod-down 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + echo + log 'mysql_servers are configured in some-name-proxysql-0 when 2 pods are down: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T12:57:45+0000]' mysql_servers are configured in some-name-proxysql-0 when 2 pods are down: OK [2026-03-16T12:57:45+0000] mysql_servers are configured in some-name-proxysql-0 when 2 pods are down: OK + compare_mysql_servers some-name-proxysql-1 mysql-servers-1-two-pod-down + local pod=some-name-proxysql-1 + local compare_file=mysql-servers-1-two-pod-down + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-1' check runtime_mysql_servers in some-name-proxysql-1+ compare_mysql_cmd_local mysql-servers-1-two-pod-down 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + echo + log 'mysql_servers are configured in some-name-proxysql-1 when 2 pods are down: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T12:57:47+0000]' mysql_servers are configured in some-name-proxysql-1 when 2 pods are down: OK [2026-03-16T12:57:47+0000] mysql_servers are configured in some-name-proxysql-1 when 2 pods are down: OK + compare_mysql_servers some-name-proxysql-2 mysql-servers-2-two-pod-down + local pod=some-name-proxysql-2 + local compare_file=mysql-servers-2-two-pod-down + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-2' check runtime_mysql_servers in some-name-proxysql-2+ compare_mysql_cmd_local mysql-servers-2-two-pod-down 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-2 + echo + log 'mysql_servers are configured in some-name-proxysql-2 when 2 pods are down: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-03-16T12:57:48+0000]' mysql_servers are configured in some-name-proxysql-2 when 2 pods are down: OK [2026-03-16T12:57:48+0000] mysql_servers are configured in some-name-proxysql-2 when 2 
pods are down: OK + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + destroy proxysql-scheduler-20685 + local namespace=proxysql-scheduler-20685 + local ignore_logs=true + [[ 0 == 1 ]] + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + sort -u + grep -v 'the object has been modified' + grep -v 'get backup status: Job.batch' + tee /tmp/tmp.zoZKt1BYg2/operator.log + grep -v level=info +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator ++ head -1 ++ jq -r '.items[] | select(.metadata.deletionTimestamp == null) | .metadata.name' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XbENrS10C5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.wvP2qDFyv0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XbENrS10C5 ++ cat /tmp/tmp.wvP2qDFyv0 ++ rm /tmp/tmp.XbENrS10C5 /tmp/tmp.wvP2qDFyv0 ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-c7445dd5-gw2cb ++ mktemp + local LAST_OUT=/tmp/tmp.bNdEU14Jw8 ++ mktemp + local LAST_ERR=/tmp/tmp.NL5cplcqkV + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-c7445dd5-gw2cb + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.bNdEU14Jw8 + cat /tmp/tmp.NL5cplcqkV + rm /tmp/tmp.bNdEU14Jw8 /tmp/tmp.NL5cplcqkV + return 0 2026-03-16T12:31:03.829Z INFO setup Feature gates {"PXCO_FEATURE_GATES": "", "enabled": ""} 2026-03-16T12:31:03.829Z INFO setup Manager starting up {"gitCommit": "dbfcca1d524d8d638feb3a7673ad33f836c01f0b", "gitBranch": "PR-2399-dbfcca1d", "buildTime": "2026-03-16T10:07:24Z", "goVersion": "go1.25.8", "os": "linux", "arch": "amd64"} 2026-03-16T12:31:03.829Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.32.13-gke.1059000"} 2026-03-16T12:31:03.832Z INFO setup Registering Components. 2026-03-16T12:31:04.248Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2026-03-16T12:31:04.248Z INFO controller-runtime.metrics Starting metrics server 2026-03-16T12:31:04.248Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2026-03-16T12:31:04.248Z INFO controller-runtime.webhook Starting webhook server 2026-03-16T12:31:04.248Z INFO setup Starting the Cmd. 
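
Teardown archives the operator log, but sanitized first: the pipeline traced above strips the "ts" timestamp fields and limits-* suffixes with sed, drops level=info lines and two known-benign messages with grep -v, dedupes with sort -u, and tees the result to operator.log. As one standalone pipeline (the pod lookup is a simplified stand-in for the jq-based selection in the trace, and the /tmp path is illustrative):

    # Sanitize-and-save pipeline mirroring the traced teardown step.
    pod=$(kubectl get pods -n pxc-operator \
          --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator \
          -o jsonpath='{.items[0].metadata.name}')
    kubectl logs -n pxc-operator "$pod" \
        | sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' \
        | grep -v level=info \
        | grep -v 'the object has been modified' \
        | grep -v 'get backup status: Job.batch' \
        | sort -u \
        | tee /tmp/operator.log
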
2026-03-16T12:31:04.248Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2026-03-16T12:31:04.249Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"} 2026-03-16T12:31:04.249Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2026-03-16T12:31:04.250Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"} 2026-03-16T12:31:04.350Z INFO Attempting to acquire leader lease... {"lock": "pxc-operator/08db1feb.percona.com"} 2026-03-16T12:31:04.381Z DEBUG events percona-xtradb-cluster-operator-c7445dd5-gw2cb_492105da-e806-462e-9f57-827bedea91f0 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"48ac6dd0-8b82-465b-a387-531043304776","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1773664264374927009"}, "reason": "LeaderElection"} 2026-03-16T12:31:04.381Z INFO Successfully acquired lease {"lock": "pxc-operator/08db1feb.percona.com"} 2026-03-16T12:31:04.382Z INFO Starting EventSource {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2026-03-16T12:31:04.382Z INFO Starting EventSource {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "source": "kind source: *v1.PerconaXtraDBCluster"} 2026-03-16T12:31:04.382Z INFO Starting EventSource {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "source": "kind source: *v1.Secret"} 2026-03-16T12:31:04.382Z INFO Starting EventSource {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2026-03-16T12:31:04.482Z INFO Starting Controller {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup"} 2026-03-16T12:31:04.482Z INFO Starting Controller {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster"} 2026-03-16T12:31:04.482Z INFO Starting Controller {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore"} 2026-03-16T12:31:04.482Z INFO Starting workers {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup", "worker count": 1} 2026-03-16T12:31:04.482Z INFO Starting workers {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "worker count": 1} 2026-03-16T12:31:04.482Z INFO Starting workers {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore", "worker count": 1} 2026-03-16T12:31:52.897Z INFO Set CR version {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": 
"61f94dc1-02e7-43cc-be1a-14b555f428f4", "version": "1.20.0"} 2026-03-16T12:31:53.240Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "61f94dc1-02e7-43cc-be1a-14b555f428f4", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2026-03-16T12:31:53.260Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "61f94dc1-02e7-43cc-be1a-14b555f428f4", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2026-03-16T12:31:53.381Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "61f94dc1-02e7-43cc-be1a-14b555f428f4", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2026-03-16T12:31:53.423Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "61f94dc1-02e7-43cc-be1a-14b555f428f4", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2026-03-16T12:31:53.484Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "61f94dc1-02e7-43cc-be1a-14b555f428f4", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-03-16T12:31:53.526Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "61f94dc1-02e7-43cc-be1a-14b555f428f4", "object": "some-name-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-03-16T12:31:53.575Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "61f94dc1-02e7-43cc-be1a-14b555f428f4", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-03-16T12:31:53.649Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "61f94dc1-02e7-43cc-be1a-14b555f428f4", 
"object": "some-name-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-03-16T12:31:54.463Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "f8923d49-3513-4a94-a3e0-bbd5f4a440bc", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2026-03-16T12:31:54.509Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "f8923d49-3513-4a94-a3e0-bbd5f4a440bc", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2026-03-16T12:33:16.125Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "38d97bb8-0a8e-4147-b64b-18a482bde6d9", "user": "operator"} 2026-03-16T12:33:16.158Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "38d97bb8-0a8e-4147-b64b-18a482bde6d9", "user": "monitor"} 2026-03-16T12:33:16.231Z INFO User monitor: granted privileges {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "38d97bb8-0a8e-4147-b64b-18a482bde6d9"} 2026-03-16T12:33:16.263Z INFO monitor user privileges granted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "38d97bb8-0a8e-4147-b64b-18a482bde6d9"} 2026-03-16T12:33:16.297Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "38d97bb8-0a8e-4147-b64b-18a482bde6d9", "user": "xtrabackup"} 2026-03-16T12:33:16.346Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "38d97bb8-0a8e-4147-b64b-18a482bde6d9"} 2026-03-16T12:33:16.380Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "38d97bb8-0a8e-4147-b64b-18a482bde6d9", "user": "replication"} 2026-03-16T12:33:16.390Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "38d97bb8-0a8e-4147-b64b-18a482bde6d9", "err": "get primary pxc pod: not found"} 2026-03-16T12:33:21.500Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "67f7be51-d555-43a2-8353-bcb567783d0e", "err": "get primary pxc pod: not found"} 2026-03-16T12:33:26.604Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "0c1db14b-ea1e-452b-a5f8-831c1299cec6", "err": "get primary pxc pod: not found"} 2026-03-16T12:35:58.013Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "c799318a-1ab5-4d59-b5a1-a619ebbda0fb", "user": "root"} 2026-03-16T12:35:58.155Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "c799318a-1ab5-4d59-b5a1-a619ebbda0fb", "new version": "8.0.43-34.1"} 2026-03-16T12:36:00.459Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "c799318a-1ab5-4d59-b5a1-a619ebbda0fb"} 2026-03-16T12:36:06.982Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "ae7505c5-e4e0-4372-84c1-22a8a5d8a527"} 2026-03-16T12:36:12.247Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "86b4be5b-23fd-4e13-9d73-2a61925a3f2a"} 2026-03-16T12:36:17.584Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": 
"PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "2495a2dd-ba00-4be9-8d9e-1326a5277d00"} 2026-03-16T12:36:23.166Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "b3343e20-dbba-4e10-bab3-92b0b6ee9b67"} 2026-03-16T12:36:28.182Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "a7690a73-7ccb-4a59-9d27-6a25f822118b"} 2026-03-16T12:36:33.249Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "c4f84207-9c50-4a24-9e13-d490d5dd0f0e"} 2026-03-16T12:36:38.670Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "befa477e-7eab-4d5d-bcb6-bffa3a6b8376"} 2026-03-16T12:36:44.015Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "4e1f7230-277b-4300-8cef-ebceb90dd218"} 2026-03-16T12:36:49.171Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "91e71d2f-c3ec-4a6b-b430-b196ad100db3"} 2026-03-16T12:36:54.494Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "b137916e-8fb3-4224-853e-ba54b327f417"} 2026-03-16T12:36:59.553Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "bf07dfc4-e130-47f8-897d-c27e8cfd2580"} 2026-03-16T12:37:04.981Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": 
"some-name", "reconcileID": "7cdcc752-ad53-4cac-9045-9a40a76b2fd6"} 2026-03-16T12:37:10.440Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "25c2be80-0bcb-420d-82a2-8e50af3c9e0a"} 2026-03-16T12:37:15.654Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "2bd80620-e2ee-4a9f-8955-fd14cd18ab97"} 2026-03-16T12:37:21.066Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "a7ed8c15-aa6c-4261-93b9-87384e4b5426"} 2026-03-16T12:37:26.489Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "c3035eb3-4cdb-49f8-a1ff-a8672a2f1e6a"} 2026-03-16T12:37:32.045Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "ed46c788-a134-413a-a5fa-f5d306789a4a"} 2026-03-16T12:37:37.437Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "8c8bd914-befe-4c92-881c-c85bca80259a"} 2026-03-16T12:37:42.639Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "8f3bbe54-84a3-47b4-94b3-85bf2a8cfa62"} 2026-03-16T12:37:48.037Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "ba8c889d-69b5-4b3a-b19b-9619f58912c6"} 2026-03-16T12:37:53.256Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "5dd68339-cee7-4e69-81d9-9d5cd908307a"} 2026-03-16T12:37:58.566Z DEBUG PXC users synced with ProxySQL {"controller": 
"pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "5f39e067-8311-41a9-bdc5-0bd7e0ef0901"} 2026-03-16T12:38:03.890Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "a1b62797-513f-4379-8d80-a0d9f1497a2d"} 2026-03-16T12:38:08.968Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "f53a2552-d955-49fc-8205-edd8e0142fe1"} 2026-03-16T12:38:13.945Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "d8797ba8-5e4e-4717-91d9-d0510a38578d"} 2026-03-16T12:38:19.495Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "7263be77-f7e6-4921-9abe-d0fcc6bb09b2"} 2026-03-16T12:38:25.546Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "779eecd0-e879-43a3-956a-e6a8a5e73119"} 2026-03-16T12:38:30.390Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "b9b6cf07-52ca-4fb6-a32b-b6275fa29f81"} 2026-03-16T12:38:35.575Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "8ef766ec-fd50-4633-95de-ae70a368d1f6"} 2026-03-16T12:38:40.738Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "225f008a-d12d-44e4-83d9-67072961c8a1"} 2026-03-16T12:38:46.447Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "f812ebcc-9b2c-4e7b-9e1c-084181a6d8dc"} 2026-03-16T12:38:51.536Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "c19a00c2-436f-4664-9458-16dbaca2871b"} 2026-03-16T12:38:56.976Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "3e963b49-18ed-4076-9364-30285ccdc013"} 2026-03-16T12:39:02.337Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "af6e8611-c0e1-4205-ad84-ab294341ae4e"} 2026-03-16T12:39:07.369Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "0d559f4a-b1c1-4b82-9fc3-4fc02a3cb1e2"} 2026-03-16T12:39:12.864Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "9f72f1dd-54ec-4c6f-9b67-5b27973370fa"} 2026-03-16T12:39:17.991Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "d82a69af-0ca9-4634-ae75-7d0bfae95173"} 2026-03-16T12:39:23.376Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "8f730ca6-141d-42c8-958c-0b77132947ce"} 2026-03-16T12:39:28.539Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "c5c8d8b6-8368-4d64-89fb-e01a5b755323"} 2026-03-16T12:39:34.245Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": 
"1e1f7197-6b6f-4761-8c9e-0ee98a752b37"} 2026-03-16T12:39:39.540Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "398662bd-38ed-4c0f-8f30-f81f971d5ec4"} 2026-03-16T12:39:42.649Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "3d7a0b4a-95ae-4afd-b127-b4ba8f97cd71", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2026-03-16T12:39:42.716Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "3d7a0b4a-95ae-4afd-b127-b4ba8f97cd71", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2026-03-16T12:39:45.012Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "cee27a78-54d2-45ae-8a75-a6e02656011b", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:832\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2026-03-16T12:40:25.363Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "16625f91-c297-4250-9b1a-b9831a90d907", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\nERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\nERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\nERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a 
2026-03-16T12:40:25.363Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "16625f91-c297-4250-9b1a-b9831a90d907", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\nERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\nERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\nERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\nERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\nERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\nERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\nERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\nERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:832\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2026-03-16T12:40:29.796Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "a4ce47df-756d-43e9-ad5c-c2e33d20649e", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:832\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
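The three "sync users" errors above come from the syncusers helper the operator execs inside the proxysql container: the first fails because the container itself was being recreated ("container not found"), the next two because the Percona scheduler had not yet registered the cluster under writer hostgroup 11 in ProxySQL's runtime tables. A minimal way to inspect that state by hand is to query the ProxySQL admin interface directly; this sketch assumes the operator defaults of an admin interface on 127.0.0.1:6032 and a proxyadmin password stored under the proxyadmin key of the internal-some-name secret (neither appears in this log):

    # hypothetical manual check, not part of the test run
    PASS=$(kubectl -n proxysql-scheduler-20685 get secret internal-some-name \
        -o jsonpath='{.data.proxyadmin}' | base64 -d)
    # list the scheduler jobs and the runtime server-to-hostgroup mapping;
    # hostgroup 11 should hold exactly one writer once the scheduler has run
    kubectl -n proxysql-scheduler-20685 exec some-name-proxysql-0 -c proxysql -- \
        mysql -h127.0.0.1 -P6032 -uproxyadmin -p"$PASS" \
        -e 'SELECT * FROM scheduler; SELECT hostgroup_id, hostname, status FROM runtime_mysql_servers;'

Until hostgroup 11 is populated, syncusers keeps failing with the line:515/line:1290 errors seen here.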
2026-03-16T12:40:35.906Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "0e27ad9e-e698-477b-aa35-9e6fb6020bb9"} 2026-03-16T12:40:40.106Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "6f92f0f7-8bc8-4972-bc75-29f26dbecec5"} 2026-03-16T12:40:45.386Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "41c1bb3e-1e48-45ca-8173-8d89226aa2ee"} 2026-03-16T12:40:50.679Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "0e19fe17-3689-4d7c-9ff3-3136b547bbea"} 2026-03-16T12:40:56.183Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "5dbf4560-7d56-4ecb-893f-1d95648f053b"} 2026-03-16T12:41:01.609Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "becb9bea-a4c9-4b20-830a-3b9e6c26058c"} 2026-03-16T12:41:06.797Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "5354052e-0b7d-43d6-a1c5-a8b080540693"} 2026-03-16T12:41:11.886Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "1d828d36-000b-4976-8e06-bdc84079c9c0"} 2026-03-16T12:41:17.384Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "5291d8db-a92f-429d-a039-6d72e4a7bd71"} 2026-03-16T12:41:22.688Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": 
"PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "6fdaffac-b3d6-4a3c-8ec9-941647cc2c1f"} 2026-03-16T12:41:27.688Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "b5648795-ade3-443e-bcc5-bc457114579e"} 2026-03-16T12:41:33.285Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "0a51c222-e111-4029-97a8-f9df043b2574"} 2026-03-16T12:41:38.585Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "1868f8c0-4d07-4cf8-b2ed-de8bd3b714b6"} 2026-03-16T12:41:43.793Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "51f500a5-e7cd-4008-80d6-184069bb2c1a"} 2026-03-16T12:41:48.905Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "dc8ece17-b457-4bfd-aa2e-a8cd5d4c7b4b"} 2026-03-16T12:41:54.395Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "771625c6-ea19-44b6-8b9d-16927cd9bdbf"} 2026-03-16T12:41:59.783Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "24e79eeb-4a8f-475b-8430-38e79d4c72d5"} 2026-03-16T12:42:04.987Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "078c8e86-09d4-48db-ae77-896c64d4be9b"} 2026-03-16T12:42:10.184Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": 
"some-name", "reconcileID": "8f85f48b-eda1-4b98-86ec-554c4e25ba51"} 2026-03-16T12:42:15.500Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "a9e276a7-670c-4c09-852d-3f111e5efcf7"} 2026-03-16T12:42:19.313Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "8b546f97-bdad-4299-bceb-d776bbbdef12", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2026-03-16T12:42:19.399Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "8b546f97-bdad-4299-bceb-d776bbbdef12", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2026-03-16T12:42:20.904Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "1e94a3c2-b5af-4be7-8a4a-8beef873bcb1", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:832\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2026-03-16T12:43:14.813Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "516dc1ab-51b5-47a0-a847-2cf4b11ac408"} 2026-03-16T12:43:19.810Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "edb39b6d-c724-4738-8f20-7c0a7dd4e8bf"} 2026-03-16T12:43:24.823Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "869aad35-3ca5-46b5-9d25-5bbcd55a16fa"} 2026-03-16T12:43:30.120Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "2667fd12-3069-4105-b45f-b214809ff702"} 2026-03-16T12:43:35.221Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "08ce1f8b-cc57-4517-8c8a-d93488dc6b44"} 2026-03-16T12:43:40.699Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "6054b2b5-6dba-4dbc-b955-4669a9195fae"} 2026-03-16T12:43:46.019Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "b26a9bfc-b6be-40c2-bf25-3cfeb3196b0e"} 2026-03-16T12:43:51.227Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "24744db4-6c55-4c57-a50a-960cf0cfa00d"} 2026-03-16T12:43:56.618Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "af121031-98f0-46c1-9295-c9ae1534e724"} 2026-03-16T12:44:02.191Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "be3eb276-2ee5-48e1-9e32-93fcc836e410"} 2026-03-16T12:44:07.493Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "4ef3e7f9-cfaf-4c2c-bac7-e24e05d7938d"} 2026-03-16T12:44:12.704Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": 
"3b43f489-a021-438a-8877-5fd9e4ea8d0b"} 2026-03-16T12:44:21.542Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "c08b7ba5-5f77-4cef-8e84-f34beddcdeb9", "primary name": "some-name-pxc-0.some-name-pxc.proxysql-scheduler-20685.svc.cluster.local"} 2026-03-16T12:44:21.690Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "916cc564-dc52-407a-8f00-1ec1e439f304", "primary name": "some-name-pxc-0.some-name-pxc.proxysql-scheduler-20685.svc.cluster.local"} 2026-03-16T12:44:26.863Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "aefcbc4e-cdd3-4022-8a8d-96e8e3ac7360", "primary name": "some-name-pxc-0.some-name-pxc.proxysql-scheduler-20685.svc.cluster.local"} 2026-03-16T12:44:32.013Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "6ad4b9f7-4e3f-4c5d-a394-ea3ef44ca6a7", "primary name": "some-name-pxc-0.some-name-pxc.proxysql-scheduler-20685.svc.cluster.local"} 2026-03-16T12:44:37.155Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "616992ea-e8be-43f3-82ed-b5448bede20f", "primary name": "some-name-pxc-0.some-name-pxc.proxysql-scheduler-20685.svc.cluster.local"}
2026-03-16T12:45:11.629Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "86b32a6d-3149-4a9d-b24d-c43c6d1e5703"}
2026-03-16T12:45:17.127Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "2222654b-0b88-45bc-b4fd-7504930967a4"}
2026-03-16T12:45:19.594Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "426a6a8f-6225-47a3-9efc-aabfbaafe883", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2026-03-16T12:45:19.654Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "426a6a8f-6225-47a3-9efc-aabfbaafe883", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true}
2026-03-16T12:45:22.108Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "426a6a8f-6225-47a3-9efc-aabfbaafe883"}
2026-03-16T12:48:05.589Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "9a8a8ce6-cc78-491c-8a49-7691e36b0252"}
2026-03-16T12:48:12.005Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "dcf0ae5d-dd1f-49cb-be8d-16b915df9420"}
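The "Unable to find primary pod for replication" entries just above mean the operator resolved a primary from ProxySQL (some-name-pxc-0...svc.cluster.local) but could not match it to any running PXC pod by name or IP, which is typical while pods are being recreated during the rolling updates in this stretch of the log. A quick manual cross-check, assuming the operator's usual app.kubernetes.io labels and that getent is present in the image (neither is shown in this log):

    # pods the operator tries to match against the logged "primary name"
    kubectl -n proxysql-scheduler-20685 get pods \
        -l app.kubernetes.io/instance=some-name,app.kubernetes.io/component=pxc -o wide
    # does the per-pod DNS record exist right now?
    kubectl -n proxysql-scheduler-20685 exec some-name-proxysql-0 -c proxysql -- \
        getent hosts some-name-pxc-0.some-name-pxc.proxysql-scheduler-20685.svc.cluster.local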
{"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "d3ebae6d-c652-488f-89d3-48c6e50dc6df"} 2026-03-16T12:48:22.603Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "ccf914a8-e3f9-4840-8b2c-9afd98ffbd81"} 2026-03-16T12:48:28.103Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "6750bede-ce29-4095-b79e-63d0a03cebc9"} 2026-03-16T12:48:33.421Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "cc5f56c7-505e-4e5e-94bf-f4fdf14cb9c5"} 2026-03-16T12:48:38.591Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "f57e03a1-54f5-46b3-8b5d-dab8e231ec98"} 2026-03-16T12:48:44.498Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "3646e94f-b2a2-4f17-8711-b644d51669b3"} 2026-03-16T12:48:49.816Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "93fead83-6527-4152-ac41-68a9f1681c12"} 2026-03-16T12:48:55.418Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "efed6788-884a-4ee1-a5bd-481dbe960048"} 2026-03-16T12:49:00.403Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "b9ecd590-ca0a-4cc8-85a5-58e141d73d04"} 2026-03-16T12:49:06.127Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": 
"ddab7230-9a72-4f2d-8cec-519cf2b07af8"} 2026-03-16T12:49:11.714Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "68f1fd7e-f6ba-441f-aba8-96b3c6b7d434"} 2026-03-16T12:49:17.387Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "0cd941b4-3913-401d-bf04-c8341e12247e"} 2026-03-16T12:49:22.780Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "f163b410-8298-4d17-9d0d-f4e0b71ca4aa"} 2026-03-16T12:49:28.078Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "81fbbc50-baf7-4972-8bb8-c6b6c370a39d"} 2026-03-16T12:49:33.478Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "9254ac53-622d-4ff6-8fea-f4683cf9d113"} 2026-03-16T12:49:39.145Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "c6b16eca-f0cf-4ae0-b22e-cbbfd00acb24"} 2026-03-16T12:49:44.419Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "7ceb6ee5-b162-487a-9eef-d78cdf4d24ae"} 2026-03-16T12:49:50.089Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "27276b94-c878-4654-be95-faf14addccd2"} 2026-03-16T12:49:55.704Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "352d686b-3696-4a48-bba0-1b4903871e2c"} 2026-03-16T12:50:01.002Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": 
"pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "4c64227f-5a19-48a6-baad-4494b2bd8f36"} 2026-03-16T12:50:06.412Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "fa739746-622c-4f06-9b3a-dd60dcf90286"} 2026-03-16T12:50:08.172Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "c3819d66-47f6-46ab-b54a-46a6c6bb590d", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2026-03-16T12:50:08.285Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "c3819d66-47f6-46ab-b54a-46a6c6bb590d", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2026-03-16T12:50:11.798Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "c3819d66-47f6-46ab-b54a-46a6c6bb590d", "error": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-2 does not have a host assigned / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-2 does not have a host assigned / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:832\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2026-03-16T12:50:50.585Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "5a303365-404e-4063-855b-622b60c51c80", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer 
hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:832\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"}
2026-03-16T12:50:59.101Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "92ff1c1d-f3d2-4bee-a2bc-5cf9c07cf6e7"}
2026-03-16T12:51:04.290Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "c2904e90-366c-4383-ba3b-15dc34657115"}
2026-03-16T12:51:09.805Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "8196c674-ee42-4807-b556-6b1bce67a654"}
2026-03-16T12:51:15.167Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "d0f920e8-45dc-4d68-adb9-ab8fc37412a6"}
2026-03-16T12:51:20.387Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "e2abd685-cba5-4e2e-a8f9-78a3dfd5435b"}
2026-03-16T12:51:26.062Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "807e834f-0678-44aa-90bc-73ee49a4330a"}
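Both exec failures earlier in this stretch are transient scheduling artifacts of the proxysql StatefulSet update: "pod some-name-proxysql-2 does not have a host assigned" means the pod was still Pending (no node yet), and "container not found (\"proxysql\")" means the pod existed but its proxysql container was not running at the moment of the exec. Both conditions are visible from the pod status; a sketch of the checks, using only names already present in the log:

    # empty output means the pod has not been scheduled to a node yet
    kubectl -n proxysql-scheduler-20685 get pod some-name-proxysql-2 \
        -o jsonpath='{.spec.nodeName}{"\n"}'
    # per-container state; waiting/terminated here explains "container not found"
    kubectl -n proxysql-scheduler-20685 get pod some-name-proxysql-2 \
        -o jsonpath='{range .status.containerStatuses[*]}{.name}{"\t"}{.state}{"\n"}{end}'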
2026-03-16T12:51:31.760Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "52851ff4-1e1a-40ec-ba52-c5699da8ea4a"}
2026-03-16T12:51:37.388Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "d2a31821-44b4-4346-8b61-5556e5d18359"}
2026-03-16T12:51:42.660Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "3e4868c8-773d-4633-b509-46e4b6bf9643"}
2026-03-16T12:51:47.892Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "cd9446e3-5e89-4870-90bd-eee55b71e407"}
2026-03-16T12:51:53.364Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "a37aa618-cac0-4d71-b7ac-eba424b33cdf"}
2026-03-16T12:51:58.906Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "216e55d7-4d72-46cc-ba97-04155acbeb36"}
2026-03-16T12:52:04.211Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "c899e713-fca5-4a7c-9a34-73a443cfa290"}
2026-03-16T12:52:09.611Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "a57157ec-d1d9-4faa-9864-1b1dd6f70031"}
2026-03-16T12:52:14.907Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "4ce0624b-2d42-4207-915f-ada69986f124"}
2026-03-16T12:52:20.389Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster",
"PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "b52e9e9f-ff3d-4a26-b156-91cadf1b6113"} 2026-03-16T12:52:25.900Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "a6c67b3b-7f61-40e1-9112-478e65c5565f"} 2026-03-16T12:52:31.260Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "a54b8eba-7a23-4164-b2c5-1d3fd8854ccd"} 2026-03-16T12:52:36.779Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "bee09f5b-b4ca-4652-8c47-5becfe7f4862"} 2026-03-16T12:52:42.061Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "e4d7ba0e-2400-4e70-b0fa-d7efedc0b18c"} 2026-03-16T12:52:47.086Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "bbd476d0-96cb-475f-851f-014c124121e5"} 2026-03-16T12:52:47.457Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "5fdd58a8-5928-4f29-9ffd-eb479e392ccf", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2026-03-16T12:52:47.558Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "5fdd58a8-5928-4f29-9ffd-eb479e392ccf", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2026-03-16T12:52:50.994Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "5fdd58a8-5928-4f29-9ffd-eb479e392ccf", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found 
(\"proxysql\") / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:832\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2026-03-16T12:53:42.959Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "a719f496-632e-45c8-8cad-49b9fc8bdf4f"} 2026-03-16T12:53:48.960Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "f3f99283-5f9a-4453-8b0f-3a63db4f4a1e"} 2026-03-16T12:53:54.355Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "cbce7ca6-7bba-4717-836d-6b6b037e1927"} 2026-03-16T12:53:59.651Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "820feeb0-0eec-4e79-a5c2-6c90dd4a1cfe"} 2026-03-16T12:54:05.227Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "4ac95dcd-7dbb-437c-9914-5d25ebcb70aa"} 2026-03-16T12:54:10.525Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "f99c1802-9a21-4e23-977d-538858bf3ca1"} 2026-03-16T12:54:15.858Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "212d73d3-fd79-4294-a6b9-7f4b2ad672c1"} 2026-03-16T12:54:21.535Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "67450d80-5f54-4786-94f6-caf4d37475e5"} 2026-03-16T12:54:26.915Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "d5cb47b5-05ff-4432-8527-ecde0898f13e"} 2026-03-16T12:54:32.235Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "92eb584f-6e8c-4582-aeb9-2785063f2b73"} 2026-03-16T12:54:38.143Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "2c88a936-d597-44ad-9f11-1e9cdafdd4a9"} 2026-03-16T12:54:43.427Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "7a3c4b4f-b787-4ac2-87d1-f4372f09c04a"} 2026-03-16T12:54:48.623Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "517f13bc-0141-41e8-b168-7360c54a7581"} 2026-03-16T12:54:54.151Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "2568720d-60d3-44c7-bc82-da1708372d5e"} 2026-03-16T12:54:56.790Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "fff2de91-6a38-47f3-87c9-5bc3798e15ff", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2026-03-16T12:54:56.850Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "fff2de91-6a38-47f3-87c9-5bc3798e15ff", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2026-03-16T12:54:59.912Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", 
"controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "61b2a5ce-f7b1-43a5-9b4c-373319ebb9bf"} 2026-03-16T12:55:05.190Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "1de46fd6-5a2e-4226-b46d-716397edc749", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-4: dial tcp: lookup some-name-pxc-4.some-name-pxc.proxysql-scheduler-20685 on 34.118.224.10:53: no such host"} 2026-03-16T12:55:10.553Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "3ae72e23-134e-491a-bfcf-e3a44b536028", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-4: dial tcp: lookup some-name-pxc-4.some-name-pxc.proxysql-scheduler-20685 on 34.118.224.10:53: no such host"} 2026-03-16T12:55:26.499Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "b653f37b-b5db-4c22-8cab-2a653f22f7d5", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-3: dial tcp: lookup some-name-pxc-3.some-name-pxc.proxysql-scheduler-20685 on 34.118.224.10:53: no such host"} 2026-03-16T12:55:31.360Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "2427c468-f632-4db4-9579-e42c12f30ef5"} 2026-03-16T12:55:37.604Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "60aa6231-1dac-43c1-96d9-27356b062d15"} 2026-03-16T12:55:43.033Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "78f04466-a838-4036-a5cf-b2b4531b3a3f"} 2026-03-16T12:55:48.310Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "6a396522-2076-4ad0-b17b-63e44a484371"} 2026-03-16T12:55:53.717Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": 
"pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "80078f24-029d-4c63-bcbb-62569f059d5c"} 2026-03-16T12:55:58.843Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "d30b9836-65dd-4e2b-9c79-846b97e7b602"} 2026-03-16T12:56:04.055Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "457d1815-3616-4ca3-9b65-08b7a56ea7a2"} 2026-03-16T12:56:09.928Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "c8c3ed8b-89c1-4a06-a89b-679f1259d81d"} 2026-03-16T12:56:15.105Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "38f401bf-3df0-43fa-96c8-36fd182b0674"} 2026-03-16T12:56:20.414Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "6fc1bded-7a5a-4e0f-9f72-ec069ad0989b"} 2026-03-16T12:56:25.441Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "5e865ec2-b869-4b76-8628-d187254d9489"} 2026-03-16T12:56:30.909Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "6e3d71d4-c425-492c-b726-ed63ec1763d6"} 2026-03-16T12:56:36.011Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "913cf7a0-8358-4540-bed3-9aaec7f54fe2"} 2026-03-16T12:56:41.538Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": 
"proxysql-scheduler-20685", "name": "some-name", "reconcileID": "cc3c3c62-cebf-4817-b6f6-6a304e013dba"} 2026-03-16T12:56:49.103Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "3191bd2d-707f-4bd8-adf5-a9c838b44c5e", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-2: dial tcp: lookup some-name-pxc-2.some-name-pxc.proxysql-scheduler-20685 on 34.118.224.10:53: no such host"} 2026-03-16T12:57:06.398Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-20685"}, "namespace": "proxysql-scheduler-20685", "name": "some-name", "reconcileID": "91bab11b-b484-435d-ad2e-c1e6940033c2", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.proxysql-scheduler-20685 on 34.118.224.10:53: no such host"} github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:834 -  }, -  { -  }, -  { -  }, -  }, +  }, +  { +  }, +  }, -  Annotations: map[string]string{ +  Annotations: map[string]string{ -  APIVersion: "apps/v1", -  APIVersion: "apps/v1", -  Args: []string{ +  Args: []string{ +  AvailableReplicas: 0, -  AvailableReplicas: 2, -  AvailableReplicas: 3, -  AvailableReplicas: 5, -  CollisionCount: &0, +  CollisionCount: nil, -  Command: []string{"/opt/percona/proxysql-entrypoint.sh"}, +  Command: []string{"/opt/percona/proxysql-entrypoint.sh"}, +  CreationTimestamp: v1.Time{}, -  CreationTimestamp: v1.Time{Time: s"2026-03-16 12:31:53 +0000 UTC"}, +  CurrentReplicas: 0, -  CurrentReplicas: 2, -  CurrentReplicas: 3, -  CurrentReplicas: 5, +  CurrentRevision: "", -  CurrentRevision: "some-name-proxysql-59ffb96cb6", -  CurrentRevision: "some-name-proxysql-85c96989b", -  CurrentRevision: "some-name-pxc-5bd5b497cc", -  DefaultMode: &420, -  DefaultMode: &420, +  DefaultMode: nil, +  DefaultMode: nil, +  DeprecatedServiceAccount: "", -  DeprecatedServiceAccount: "default", +  DNSPolicy: "", -  DNSPolicy: "ClusterFirst", -  EnvFrom: []v1.EnvFromSource{{SecretRef: s"&SecretEnvSource{LocalObjectRefe"...}}, +  EnvFrom: []v1.EnvFromSource{{SecretRef: s"&SecretEnvSource{LocalObjectRefe"...}}, -  Env: []v1.EnvVar{ +  Env: []v1.EnvVar{ -  FieldsType: "FieldsV1", -  FieldsType: "FieldsV1", -  FieldsV1: s`{"f:metadata":{"f:annotations":{".":{},"f:percona.com/last-confi`..., -  FieldsV1: s`{"f:status":{"f:availableReplicas":{},"f:collisionCount":{},"f:c`..., +  Generation: 0, -  Generation: 1, -  Generation: 2, -  Generation: 3, -  Generation: 4, -  Image: "perconalab/percona-xtradb-cluster-operator:main-proxysql", +  Image: "perconalab/percona-xtradb-cluster-operator:main-proxysql", -  ImagePullPolicy: "Always", +  ImagePullPolicy: "Always", +  ManagedFields: nil, -  ManagedFields: []v1.ManagedFieldsEntry{ -  Manager: "kube-controller-manager", -  Manager: "percona-xtradb-cluster-operator", -  {Name: "MONITOR_PASSWORD", ValueFrom: s"&EnvVarSource{FieldRef:nil,Resou"...}, +  {Name: "MONITOR_PASSWORD", ValueFrom: 
s"&EnvVarSource{FieldRef:nil,Resou"...}, -  {Name: "OPERATOR_PASSWORD", ValueFrom: s"&EnvVarSource{FieldRef:nil,Resou"...}, +  {Name: "OPERATOR_PASSWORD", ValueFrom: s"&EnvVarSource{FieldRef:nil,Resou"...}, -  {Name: "PROXY_ADMIN_PASSWORD", ValueFrom: s"&EnvVarSource{FieldRef:nil,Resou"...}, +  {Name: "PROXY_ADMIN_PASSWORD", ValueFrom: s"&EnvVarSource{FieldRef:nil,Resou"...}, -  {Name: "PROXY_ADMIN_USER", Value: "proxyadmin"}, +  {Name: "PROXY_ADMIN_USER", Value: "proxyadmin"}, -  Name: "proxysql-monit", +  Name: "proxysql-monit", -  {Name: "PROXYSQL_SERVICE", Value: "some-name-proxysql-unready"}, +  {Name: "PROXYSQL_SERVICE", Value: "some-name-proxysql-unready"}, -  {Name: "SCHEDULER_ENABLED", Value: "true"}, +  {Name: "SCHEDULER_ENABLED", Value: "true"}, +  ObservedGeneration: 0, -  ObservedGeneration: 1, -  ObservedGeneration: 2, -  ObservedGeneration: 3, -  ObservedGeneration: 4, -  "-on-change=/opt/percona/proxysql_add_proxysql_nodes.sh", +  "-on-change=/opt/percona/proxysql_add_proxysql_nodes.sh", -  Operation: "Update", -  Operation: "Update", -  "/opt/percona/peer-list", +  "/opt/percona/peer-list", -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJjMDBlODlkMzAxNzRkNmFhN2FkODM4OGQ3ZTIxYTI3YiIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNvbS9zc2wtaW50ZXJuYWwtaGFzaCI6Ijc1MWU1OWExYzFkOTRhYzY3ZjlkNmJiNmU3NTZhY2RjIn19LCJzcGVjIjp7InZvbHVtZXMiOlt7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImNvbmZpZyIsImNvbmZpZ01hcCI6eyJuYW1lIjoic29tZS1uYW1lLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJiaW4iLCJlbXB0eURpciI6e319XSwiaW5pdENvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTIzOTktZGJmY2NhMWQiLCJjb21tYW5kIjpbIi9weGMtaW5pdC1lbnRyeXBvaW50LnNoIl0sInJlc291cmNlcyI6eyJsaW1pdHMiOnsiY3B1IjoiNTBtIiwibWVtb3J5IjoiNTBNIn19LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJiaW4iLCJtb3VudFBhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHJveHlzcWwtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTIzOTktZGJmY2NhMWQiLCJjb21tYW5kIjpbIi9wcm94eXNxbC1pbml0LWVudHJ5cG9pbnQuc2giXSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6
eyJjcHUiOiI1MG0iLCJtZW1vcnkiOiI1ME0ifX0sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImJpbiIsIm1vdW50UGF0aCI6Ii9vcHQvcGVyY29uYSJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn1dLCJjb250YWluZXJzIjpbeyJuYW1lIjoicHJveHlzcWwiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXByb3h5c3FsIiwiY29tbWFuZCI6WyIvb3B0L3BlcmNvbmEvcHJveHlzcWwtZW50cnlwb2ludC5zaCJdLCJhcmdzIjpbInByb3h5c3FsIiwiLWYiLCItYyIsIi9ldGMvcHJveHlzcWwvcHJveHlzcWwuY25mIiwiLS1yZWxvYWQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InByb3h5YWRtIiwiY29udGFpbmVyUG9ydCI6NjAzMn0seyJuYW1lIjoic3RhdHMiLCJjb250YWluZXJQb3J0Ijo2MDcwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IlBYQ19TRVJWSUNFIiwidmFsdWUiOiJzb21lLW5hbWUtcHhjIn0seyJuYW1lIjoiT1BFUkFUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiUFJPWFlfQURNSU5fVVNFUiIsInZhbHVlIjoicHJveHlhZG1pbiJ9LHsibmFtZSI6IlBST1hZX0FETUlOX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoicHJveHlhZG1pbiJ9fX0seyJuYW1lIjoiTU9OSVRPUl9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6Im1vbml0b3IifX19LHsibmFtZSI6IlNDSEVEVUxFUl9DSEVDS1RJTUVPVVQiLCJ2YWx1ZSI6IjIwMDAifSx7Im5hbWUiOiJTQ0hFRFVMRVJfV1JJVEVSQUxTT1JFQURFUiIsInZhbHVlIjoiMSJ9LHsibmFtZSI6IlNDSEVEVUxFUl9SRVRSWVVQIiwidmFsdWUiOiIxIn0seyJuYW1lIjoiU0NIRURVTEVSX1JFVFJZRE9XTiIsInZhbHVlIjoiMyJ9LHsibmFtZSI6IlNDSEVEVUxFUl9QSU5HVElNRU9VVCIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9OT0RFQ0hFQ0tJTlRFUlZBTCIsInZhbHVlIjoiMjAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9NQVhDT05ORUNUSU9OUyIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlBFUkNPTkFfU0NIRURVTEVSX0NGRyIsInZhbHVlIjoiL3RtcC9zY2hlZHVsZXItY29uZmlnLnRvbWwifSx7Im5hbWUiOiJTQ0hFRFVMRVJfRU5BQkxFRCIsInZhbHVlIjoidHJ1ZSJ9XSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6eyJjcHUiOiI3MDBtIiwibWVtb3J5IjoiMUcifSwicmVxdWVzdHMiOnsiY3B1IjoiMTAwbSIsIm1lbW9y"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJjMDBlODlkMzAxNzRkNmFhN2FkODM4OGQ3ZTIxYTI3YiIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNvbS9zc2wtaW50ZXJuYWwtaGFzaCI6Ijc1MWU1OWExYzFkOTRhYzY3ZjlkNmJiNmU3NTZhY2RjIn19LCJzcGVjIjp7InZvbHVtZXMiOlt7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImNvbmZpZyIsImNvbmZpZ01hcCI6eyJuYW1lIjoic29tZS1uYW1lLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJiaW4iLCJlbXB0eURpciI6e319XSwiaW5pdENvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTIzOTktZGJmY2NhMWQiLCJjb21tYW5kIjpbIi9weGMtaW5pdC1lbnRyeXBvaW50LnNoIl0sInJlc291cmNlcyI6eyJsaW1pdHMiOnsiY3B1IjoiNTBtIiwibWVtb3J5IjoiNTBNIn19LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJiaW4iLCJtb3VudFBhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHJveHlzcWwtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTIzOTktZGJmY2NhMWQiLCJjb21tYW5kIjpbIi9wcm94eXNxbC1pbml0LWVudHJ5cG9pbnQuc2giXSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6eyJjcHUiOiI1MG0iLCJtZW1vcnkiOiI1ME0ifX0sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImJpbiIsIm1vdW50UGF0aCI6Ii9vcHQvcGVyY29uYSJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn1dLCJjb250YWluZXJzIjpbeyJuYW1lIjoicHJveHlzcWwiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXByb3h5c3FsIiwiY29tbWFuZCI6WyIvb3B0L3BlcmNvbmEvcHJveHlzcWwtZW50cnlwb2ludC5zaCJdLCJhcmdzIjpbInByb3h5c3FsIiwiLWYiLCItYyIsIi9ldGMvcHJveHlzcWwvcHJveHlzcWwuY25mIiwiLS1yZWxvYWQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InByb3h5YWRtIiwiY29udGFpbmVyUG9ydCI6NjAzMn0seyJuYW1lIjoic3RhdHMiLCJjb250YWluZXJQb3J0Ijo2MDcwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IlBYQ19TRVJWSUNFIiwidmFsdWUiOiJzb21lLW5hbWUtcHhjIn0seyJuYW1lIjoiT1BFUkFUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiUFJPWFlfQURNSU5fVVNFUiIsInZhbHVlIjoicHJveHlhZG1pbiJ9LHsibmFtZSI6IlBST1hZX0FETUlOX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoicHJveHlhZG1pbiJ9fX0seyJuYW1lIjoiTU9OSVRPUl9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6Im1vbml0b3IifX19LHsibmFtZSI6IlNDSEVEVUxFUl9DSEVDS1RJTUVPVVQiLCJ2YWx1ZSI6IjIwMDAifSx7Im5hbWUiOiJTQ0hFRFVMRVJfV1JJVEVSQUxTT1JFQURFUiIsInZhbHVlIjoiMSJ9LHsibmFtZSI6IlNDSEVEVUxFUl9SRVRSWVVQIi
widmFsdWUiOiIxIn0seyJuYW1lIjoiU0NIRURVTEVSX1JFVFJZRE9XTiIsInZhbHVlIjoiMyJ9LHsibmFtZSI6IlNDSEVEVUxFUl9QSU5HVElNRU9VVCIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9OT0RFQ0hFQ0tJTlRFUlZBTCIsInZhbHVlIjoiMjAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9NQVhDT05ORUNUSU9OUyIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlBFUkNPTkFfU0NIRURVTEVSX0NGRyIsInZhbHVlIjoiL3RtcC9zY2hlZHVsZXItY29uZmlnLnRvbWwifSx7Im5hbWUiOiJTQ0hFRFVMRVJfRU5BQkxFRCIsInZhbHVlIjoidHJ1ZSJ9XSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6eyJjcHUiOiI3MDBtIiwibWVtb3J5IjoiMUcifSwicmVxdWVzdHMiOnsiY3B1IjoiMTAwbSIsIm1lbW9y"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJjMDBlODlkMzAxNzRkNmFhN2FkODM4OGQ3ZTIxYTI3YiIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNvbS9zc2wtaW50ZXJuYWwtaGFzaCI6Ijc1MWU1OWExYzFkOTRhYzY3ZjlkNmJiNmU3NTZhY2RjIn19LCJzcGVjIjp7InZvbHVtZXMiOlt7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImNvbmZpZyIsImNvbmZpZ01hcCI6eyJuYW1lIjoic29tZS1uYW1lLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJiaW4iLCJlbXB0eURpciI6e319XSwiaW5pdENvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTIzOTktZGJmY2NhMWQiLCJjb21tYW5kIjpbIi9weGMtaW5pdC1lbnRyeXBvaW50LnNoIl0sInJlc291cmNlcyI6eyJsaW1pdHMiOnsiY3B1IjoiNTBtIiwibWVtb3J5IjoiNTBNIn19LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJiaW4iLCJtb3VudFBhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHJveHlzcWwtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTIzOTktZGJmY2NhMWQiLCJjb21tYW5kIjpbIi9wcm94eXNxbC1pbml0LWVudHJ5cG9pbnQuc2giXSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6eyJjcHUiOiI1MG0iLCJtZW1vcnkiOiI1ME0ifX0sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImJpbiIsIm1vdW50UGF0aCI6Ii9vcHQvcGVyY29uYSJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn1dLCJjb250YWluZXJzIjpbeyJuYW1lIjoicHJveHlzcWwiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXByb3h5c3FsIiwiY29tbWFuZCI6WyIvb3B0L3BlcmNvbmEvcHJveHlzcWwtZW50cnlwb2ludC5zaCJdLCJhcmdzIjpbInByb3h5c3FsIiwiLWYiLCItYyIsIi9ldGMvcHJveHlzcWwvcHJveHlzcWwuY25mIiwiLS1yZWxvYWQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InByb3h5YWRtIiwiY29udGFpbmVyUG9ydCI6NjAzMn0seyJuYW1lIjoic3RhdHMiLCJjb250YWluZXJQb3J0Ijo2MDcwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IlBYQ19TRVJWSUNFIiwidmFsdWUiOiJzb21lLW5hbWUtcHhjIn0seyJuYW1lIjoiT1BFUkFUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp
7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiUFJPWFlfQURNSU5fVVNFUiIsInZhbHVlIjoicHJveHlhZG1pbiJ9LHsibmFtZSI6IlBST1hZX0FETUlOX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoicHJveHlhZG1pbiJ9fX0seyJuYW1lIjoiTU9OSVRPUl9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6Im1vbml0b3IifX19LHsibmFtZSI6IlNDSEVEVUxFUl9DSEVDS1RJTUVPVVQiLCJ2YWx1ZSI6IjIwMDAifSx7Im5hbWUiOiJTQ0hFRFVMRVJfV1JJVEVSQUxTT1JFQURFUiIsInZhbHVlIjoiMSJ9LHsibmFtZSI6IlNDSEVEVUxFUl9SRVRSWVVQIiwidmFsdWUiOiIxIn0seyJuYW1lIjoiU0NIRURVTEVSX1JFVFJZRE9XTiIsInZhbHVlIjoiMyJ9LHsibmFtZSI6IlNDSEVEVUxFUl9QSU5HVElNRU9VVCIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9OT0RFQ0hFQ0tJTlRFUlZBTCIsInZhbHVlIjoiMjAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9NQVhDT05ORUNUSU9OUyIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlBFUkNPTkFfU0NIRURVTEVSX0NGRyIsInZhbHVlIjoiL3RtcC9zY2hlZHVsZXItY29uZmlnLnRvbWwifV0sInJlc291cmNlcyI6eyJsaW1pdHMiOnsiY3B1IjoiNzAwbSIsIm1lbW9yeSI6IjFHIn0sInJlcXVlc3RzIjp7ImNwdSI6IjEwMG0iLCJtZW1vcnkiOiIxMDBNIn19LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJwcm94eWRh"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJjMDBlODlkMzAxNzRkNmFhN2FkODM4OGQ3ZTIxYTI3YiIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNvbS9zc2wtaW50ZXJuYWwtaGFzaCI6Ijc1MWU1OWExYzFkOTRhYzY3ZjlkNmJiNmU3NTZhY2RjIn19LCJzcGVjIjp7InZvbHVtZXMiOlt7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImNvbmZpZyIsImNvbmZpZ01hcCI6eyJuYW1lIjoic29tZS1uYW1lLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJiaW4iLCJlbXB0eURpciI6e319XSwiaW5pdENvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTIzOTktZGJmY2NhMWQiLCJjb21tYW5kIjpbIi9weGMtaW5pdC1lbnRyeXBvaW50LnNoIl0sInJlc291cmNlcyI6eyJsaW1pdHMiOnsiY3B1IjoiNTBtIiwibWVtb3J5IjoiNTBNIn19LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJiaW4iLCJtb3VudFBhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHJveHlzcWwtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTIzOTktZGJmY2NhMWQiLCJjb21tYW5kIjpbIi9wcm94eXNxbC1pbml0LWVudHJ5cG9pbnQuc2giXSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6eyJjcHUiOiI1MG0iLCJtZW1vcnkiOiI1ME0ifX0sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImJpbiIsIm1vdW50UGF0aCI6Ii9vcHQvcGVyY29uYSJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn1dLCJjb250YWluZXJzIjpbeyJuYW1lIjoicHJveHlzcWwiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXByb3h5c3FsIiwi
Y29tbWFuZCI6WyIvb3B0L3BlcmNvbmEvcHJveHlzcWwtZW50cnlwb2ludC5zaCJdLCJhcmdzIjpbInByb3h5c3FsIiwiLWYiLCItYyIsIi9ldGMvcHJveHlzcWwvcHJveHlzcWwuY25mIiwiLS1yZWxvYWQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InByb3h5YWRtIiwiY29udGFpbmVyUG9ydCI6NjAzMn0seyJuYW1lIjoic3RhdHMiLCJjb250YWluZXJQb3J0Ijo2MDcwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IlBYQ19TRVJWSUNFIiwidmFsdWUiOiJzb21lLW5hbWUtcHhjIn0seyJuYW1lIjoiT1BFUkFUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiUFJPWFlfQURNSU5fVVNFUiIsInZhbHVlIjoicHJveHlhZG1pbiJ9LHsibmFtZSI6IlBST1hZX0FETUlOX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoicHJveHlhZG1pbiJ9fX0seyJuYW1lIjoiTU9OSVRPUl9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6Im1vbml0b3IifX19LHsibmFtZSI6IlNDSEVEVUxFUl9DSEVDS1RJTUVPVVQiLCJ2YWx1ZSI6IjIwMDAifSx7Im5hbWUiOiJTQ0hFRFVMRVJfV1JJVEVSQUxTT1JFQURFUiIsInZhbHVlIjoiMSJ9LHsibmFtZSI6IlNDSEVEVUxFUl9SRVRSWVVQIiwidmFsdWUiOiIxIn0seyJuYW1lIjoiU0NIRURVTEVSX1JFVFJZRE9XTiIsInZhbHVlIjoiMyJ9LHsibmFtZSI6IlNDSEVEVUxFUl9QSU5HVElNRU9VVCIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9OT0RFQ0hFQ0tJTlRFUlZBTCIsInZhbHVlIjoiMjAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9NQVhDT05ORUNUSU9OUyIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlBFUkNPTkFfU0NIRURVTEVSX0NGRyIsInZhbHVlIjoiL3RtcC9zY2hlZHVsZXItY29uZmlnLnRvbWwifV0sInJlc291cmNlcyI6eyJsaW1pdHMiOnsiY3B1IjoiNzAwbSIsIm1lbW9yeSI6IjFHIn0sInJlcXVlc3RzIjp7ImNwdSI6IjEwMG0iLCJtZW1vcnkiOiIxMDBNIn19LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJwcm94eWRh"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJl"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJl"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJjMDBlODlkMzAxNzRkNmFhN2FkODM4OGQ3ZTIxYTI3YiIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNvbS9zc2wtaW50ZXJuYWwtaGFzaCI6Ijc1MWU1OWExYzFkOTRhYzY3ZjlkNmJiNmU3NTZhY2RjIn19LCJzcGVjIjp7InZvbHVtZXMiOlt7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImNvbmZpZyIsImNvbmZpZ01hcCI6eyJuYW1lIjoic29tZS1uYW1lLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJiaW4iLCJlbXB0eURpciI6e319XSwiaW5pdENvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTIzOTktZGJmY2NhMWQiLCJjb21tYW5kIjpbIi9weGMtaW5pdC1lbnRyeXBvaW50LnNoIl0sInJlc291cmNlcyI6eyJsaW1pdHMiOnsiY3B1IjoiNTBtIiwibWVtb3J5IjoiNTBNIn19LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJiaW4iLCJtb3VudFBhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHJveHlzcWwtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTIzOTktZGJmY2NhMWQiLCJjb21tYW5kIjpbIi9wcm94eXNxbC1pbml0LWVudHJ5cG9pbnQuc2giXSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6eyJjcHUiOiI1MG0iLCJtZW1vcnkiOiI1ME0ifX0sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImJpbiIsIm1vdW50UGF0aCI6Ii9vcHQvcGVyY29uYSJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn1dLCJjb250YWluZXJzIjpbeyJuYW1lIjoicHJveHlzcWwiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXByb3h5c3FsIiwiY29tbWFuZCI6WyIvb3B0L3BlcmNvbmEvcHJveHlzcWwtZW50cnlwb2ludC5zaCJdLCJhcmdzIjpbInByb3h5c3FsIiwiLWYiLCItYyIsIi9ldGMvcHJveHlzcWwvcHJveHlzcWwuY25mIiwiLS1yZWxvYWQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InByb3h5YWRtIiwiY29udGFpbmVyUG9ydCI6NjAzMn0seyJuYW1lIjoic3RhdHMiLCJjb250YWluZXJQb3J0Ijo2MDcwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IlBYQ19TRVJWSUNFIiwidmFsdWUiOiJzb21lLW5hbWUtcHhjIn0seyJuYW1lIjoiT1BFUkFUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiUFJPWFlfQURNSU5fVVNFUiIsInZhbHVlIjoicHJveHlhZG1pbiJ9LHsibmFtZSI6IlBST1hZX0FETUlOX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoicHJveHlhZG1pbiJ9fX0seyJuYW1lIjoiTU9OSVRPUl9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6Im1vbml0b3IifX19LHsibmFtZSI6IlNDSEVEVUxFUl9DSEVDS1RJTUVPVVQiLCJ2YWx1ZSI6IjIwMDAifSx7Im5hbWUiOiJTQ0hFRFVMRVJfV1JJVEVSQUxTT1JFQURFUiIsInZhbHVlIjoiMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9SRVRSWVVQIi
widmFsdWUiOiIxIn0seyJuYW1lIjoiU0NIRURVTEVSX1JFVFJZRE9XTiIsInZhbHVlIjoiMyJ9LHsibmFtZSI6IlNDSEVEVUxFUl9QSU5HVElNRU9VVCIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9OT0RFQ0hFQ0tJTlRFUlZBTCIsInZhbHVlIjoiMjAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9NQVhDT05ORUNUSU9OUyIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlBFUkNPTkFfU0NIRURVTEVSX0NGRyIsInZhbHVlIjoiL3RtcC9zY2hlZHVsZXItY29uZmlnLnRvbWwifSx7Im5hbWUiOiJTQ0hFRFVMRVJfRU5BQkxFRCIsInZhbHVlIjoidHJ1ZSJ9XSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6eyJjcHUiOiI3MDBtIiwibWVtb3J5IjoiMUcifSwicmVxdWVzdHMiOnsiY3B1IjoiMTAwbSIsIm1lbW9y"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJjMDBlODlkMzAxNzRkNmFhN2FkODM4OGQ3ZTIxYTI3YiIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNvbS9zc2wtaW50ZXJuYWwtaGFzaCI6Ijc1MWU1OWExYzFkOTRhYzY3ZjlkNmJiNmU3NTZhY2RjIn19LCJzcGVjIjp7InZvbHVtZXMiOlt7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImNvbmZpZyIsImNvbmZpZ01hcCI6eyJuYW1lIjoic29tZS1uYW1lLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJiaW4iLCJlbXB0eURpciI6e319XSwiaW5pdENvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTIzOTktZGJmY2NhMWQiLCJjb21tYW5kIjpbIi9weGMtaW5pdC1lbnRyeXBvaW50LnNoIl0sInJlc291cmNlcyI6eyJsaW1pdHMiOnsiY3B1IjoiNTBtIiwibWVtb3J5IjoiNTBNIn19LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJiaW4iLCJtb3VudFBhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHJveHlzcWwtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTIzOTktZGJmY2NhMWQiLCJjb21tYW5kIjpbIi9wcm94eXNxbC1pbml0LWVudHJ5cG9pbnQuc2giXSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6eyJjcHUiOiI1MG0iLCJtZW1vcnkiOiI1ME0ifX0sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImJpbiIsIm1vdW50UGF0aCI6Ii9vcHQvcGVyY29uYSJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn1dLCJjb250YWluZXJzIjpbeyJuYW1lIjoicHJveHlzcWwiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXByb3h5c3FsIiwiY29tbWFuZCI6WyIvb3B0L3BlcmNvbmEvcHJveHlzcWwtZW50cnlwb2ludC5zaCJdLCJhcmdzIjpbInByb3h5c3FsIiwiLWYiLCItYyIsIi9ldGMvcHJveHlzcWwvcHJveHlzcWwuY25mIiwiLS1yZWxvYWQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InByb3h5YWRtIiwiY29udGFpbmVyUG9ydCI6NjAzMn0seyJuYW1lIjoic3RhdHMiLCJjb250YWluZXJQb3J0Ijo2MDcwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IlBYQ19TRVJWSUNFIiwidmFsdWUiOiJzb21lLW5hbWUtcHhjIn0seyJuYW1lIjoiT1BFUkFUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp
7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiUFJPWFlfQURNSU5fVVNFUiIsInZhbHVlIjoicHJveHlhZG1pbiJ9LHsibmFtZSI6IlBST1hZX0FETUlOX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoicHJveHlhZG1pbiJ9fX0seyJuYW1lIjoiTU9OSVRPUl9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6Im1vbml0b3IifX19LHsibmFtZSI6IlNDSEVEVUxFUl9DSEVDS1RJTUVPVVQiLCJ2YWx1ZSI6IjIwMDAifSx7Im5hbWUiOiJTQ0hFRFVMRVJfV1JJVEVSQUxTT1JFQURFUiIsInZhbHVlIjoiMSJ9LHsibmFtZSI6IlNDSEVEVUxFUl9SRVRSWVVQIiwidmFsdWUiOiIxIn0seyJuYW1lIjoiU0NIRURVTEVSX1JFVFJZRE9XTiIsInZhbHVlIjoiMyJ9LHsibmFtZSI6IlNDSEVEVUxFUl9QSU5HVElNRU9VVCIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9OT0RFQ0hFQ0tJTlRFUlZBTCIsInZhbHVlIjoiMjAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9NQVhDT05ORUNUSU9OUyIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlBFUkNPTkFfU0NIRURVTEVSX0NGRyIsInZhbHVlIjoiL3RtcC9zY2hlZHVsZXItY29uZmlnLnRvbWwifSx7Im5hbWUiOiJTQ0hFRFVMRVJfRU5BQkxFRCIsInZhbHVlIjoidHJ1ZSJ9XSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6eyJjcHUiOiI3MDBtIiwibWVtb3J5IjoiMUcifSwicmVxdWVzdHMiOnsiY3B1IjoiMTAwbSIsIm1lbW9y"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6NSwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJl"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6NSwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJl"..., +  PeriodSeconds: 0, -  PeriodSeconds: 10, +  PersistentVolumeClaimRetentionPolicy: nil, -  PersistentVolumeClaimRetentionPolicy: s"&StatefulSetPersistentVolumeClaimRetentionPolicy{WhenDeleted:Retain,WhenScaled:Retain,}", +  Phase: "", -  Phase: "Pending", +  PodManagementPolicy: "", -  PodManagementPolicy: "OrderedReady", +  Protocol: "", -  Protocol: "TCP", +  ReadyReplicas: 0, -  ReadyReplicas: 2, -  ReadyReplicas: 3, -  ReadyReplicas: 5, +  Replicas: 0, -  Replicas: 2, -  Replicas: &2, -  Replicas: 3, -  Replicas: &3, +  Replicas: &3, -  Replicas: 5, -  Replicas: &5, +  Replicas: &5, +  ResourceVersion: "", -  ResourceVersion: "1773664358742095004", -  ResourceVersion: "1773664836636623004", -  ResourceVersion: "1773664974862063004", -  ResourceVersion: "1773665106252367017", -  ResourceVersion: "1773665279053503017", -  ResourceVersion: "1773665438145231004", +  RestartPolicy: "", -  RestartPolicy: "Always", -  RevisionHistoryLimit: &10, +  RevisionHistoryLimit: nil, +  SchedulerName: "", -  SchedulerName: "default-scheduler", -  "-service=$(PROXYSQL_SERVICE)", "-protocol=$(PEER_LIST_SRV_PROTOCOL)", +  "-service=$(PROXYSQL_SERVICE)", "-protocol=$(PEER_LIST_SRV_PROTOCOL)", -  Subresource: "status", +  TerminationMessagePath: "", -  TerminationMessagePath: "/dev/termination-log", +  TerminationMessagePolicy: "", -  TerminationMessagePolicy: "File", -  Time: s"2026-03-16 12:31:53 +0000 UTC", -  Time: s"2026-03-16 12:32:38 +0000 UTC", -  Time: s"2026-03-16 12:39:42 +0000 UTC", -  Time: s"2026-03-16 12:40:36 +0000 UTC", -  Time: s"2026-03-16 12:42:19 +0000 UTC", -  Time: s"2026-03-16 12:42:54 +0000 UTC", -  Time: s"2026-03-16 12:45:06 +0000 UTC", -  Time: s"2026-03-16 12:45:19 +0000 UTC", -  Time: s"2026-03-16 12:47:59 +0000 UTC", -  Time: s"2026-03-16 12:50:08 +0000 UTC", -  Time: s"2026-03-16 
12:50:38 +0000 UTC", -  TopologySpreadConstraints: nil, +  TopologySpreadConstraints: []v1.TopologySpreadConstraint{}, +  UID: "", -  UID: "685f2796-99ce-465f-bc91-c4f89efda4aa", -  UID: "a70b9dbc-3933-45c0-950c-8276abbde2e8", +  UpdatedReplicas: 0, -  UpdatedReplicas: 2, -  UpdatedReplicas: 3, -  UpdatedReplicas: 5, +  UpdateRevision: "", -  UpdateRevision: "some-name-proxysql-59ffb96cb6", -  UpdateRevision: "some-name-proxysql-85c96989b", -  UpdateRevision: "some-name-pxc-5bd5b497cc", +  Value: "0", -  Value: "1", -  VolumeMode: &"Filesystem", +  VolumeMode: nil, -  VolumeMounts: []v1.VolumeMount{{Name: "bin", MountPath: "/opt/percona"}}, +  VolumeMounts: []v1.VolumeMount{{Name: "bin", MountPath: "/opt/percona"}},   }    },    },    {    },    },    {    },    },    {    },    ... // 11 identical elements    ... // 16 identical fields    ... // 16 identical fields    ... // 22 identical fields    ... // 2 identical fields    ... // 3 identical fields    ... // 3 identical fields    ... // 3 identical fields    ... // 4 identical elements    ... // 4 identical fields    ... // 5 identical elements    ... // 5 identical fields    ... // 5 identical fields    ... // 6 identical fields    ... // 6 identical fields    ... // 7 identical fields    ... // 8 identical fields    ... // 9 identical fields    AccessModes: nil,    ActiveDeadlineSeconds: nil,    Affinity: nil,    Args: {"mysqld"},    Args: {"proxysql", "-f", "-c", "/etc/proxysql/proxysql.cnf", ...},    AutomountServiceAccountToken: nil,    AWSElasticBlockStore: nil,    AzureFile: nil,    Capacity: nil,    Conditions: nil,    ConfigMap: &v1.ConfigMapVolumeSource{    ContainerPort: 3306,    ContainerPort: 33060,    ContainerPort: 33062,    ContainerPort: 4444,    ContainerPort: 4567,    ContainerPort: 4568,    ContainerPort: 6032,    ContainerPort: 6070,    Containers: []v1.Container{    DataSource: nil,    DataSourceRef: nil,    DeletionGracePeriodSeconds: nil,    DeletionTimestamp: nil,    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-proxysql"}, Optional: &true}}},    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-pxc"}, Optional: &true}}},    Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc"}, {Name: "OPERATOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}}, {Name: "PROXY_ADMIN_USER", Value: "proxyadmin"}, {Name: "PROXY_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "proxyadmin"}}}, ...},    Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc-unready"}, {Name: "MONITOR_HOST", Value: "%"}, {Name: "MYSQL_ROOT_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "root"}}}, {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}}, ...},    Env: []v1.EnvVar{    EphemeralContainers: nil,    FailureThreshold: 3,    FC: nil,    Finalizers: nil,    GitRepo: nil,    HostAliases: nil,    HostIP: "",    HostPort: 0,    ImagePullPolicy: "Always",    InitContainers: []v1.Container{    InitialDelaySeconds: 300,    ISCSI: nil,    Items: nil,    Items: nil,    Labels: nil,    Lifecycle: nil,    LivenessProbe: &v1.Probe{    LocalObjectReference: {Name: "auto-some-name-pxc"},    LocalObjectReference: {Name: "some-name-proxysql"},    LocalObjectReference: {Name: "some-name-pxc"},    MinReadySeconds: 0,    Name: "auto-config",    {Name: "bin", VolumeSource: {EmptyDir: 
&{}}},    Name: "config",    Name: "ist",    {Name: "MONITOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "monitor"}}},    Name: "mysql",    Name: "mysql-admin",    Name: "mysql-init-file",    Name: "mysql-users-secret-file",    Name: "mysqlx",    {Name: "PERCONA_SCHEDULER_CFG", Value: "/tmp/scheduler-config.toml"},    Name: "proxyadm",    {Name: "SCHEDULER_CHECKTIMEOUT", Value: "2000"},    {Name: "SCHEDULER_MAXCONNECTIONS", Value: "1000"},    {Name: "SCHEDULER_RETRYDOWN", Value: "3"},    {Name: "SCHEDULER_RETRYUP", Value: "1"},    Name: "SCHEDULER_WRITERALSOREADER",    Namespace: "proxysql-scheduler-20685",    Name: "ssl",    Name: "ssl-internal",    Name: "sst",    Name: "stats",    {Name: "tmp", VolumeSource: {EmptyDir: &{}}},    Name: "vault-keyring-secret",    Name: "write-set",    NFS: nil,    NodeName: "",    NodeSelector: nil,    ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "percona.com/configuration-hash": "c00e89d30174d6aa7ad8388d7e21a27b", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc"}},    ObjectMeta: {Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "pxc", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc"}},    ObjectMeta: {Name: "datadir", Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    ObjectMeta: {Name: "proxydata", Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    ObjectMeta: v1.ObjectMeta{    Optional: &false,    Optional: &true,    Optional: &true,    Ordinals: nil,    OS: nil,    Overhead: nil,    OwnerReferences: {{APIVersion: "pxc.percona.com/v1", Kind: "PerconaXtraDBCluster", Name: "some-name", UID: "a7332024-83ce-4c37-9c8b-7c3cb3f2df19", ...}},    Ports: nil,    Ports: []v1.ContainerPort{    PreemptionPolicy: nil,    ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/liveness-check.sh"}}},    Quobyte: nil,    ReadinessProbe: &{ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/readiness-check.sh"}}}, InitialDelaySeconds: 15, TimeoutSeconds: 15, PeriodSeconds: 30, ...},    Replicas: &2,    Replicas: &3,    ResizePolicy: nil,    Resources: {},    Resources: {Limits: {s"cpu": {i: {...}, s: "700m", Format: "DecimalSI"}, s"memory": {i: {...}, s: "1G", Format: "DecimalSI"}}, Requests: {s"cpu": {i: {...}, s: "100m", Format: "DecimalSI"}, s"memory": {i: {...}, Format: "DecimalSI"}}},    SecretName: "internal-some-name",    SecretName: "some-name-mysql-init",    SecretName: "some-name-ssl",    SecretName: "some-name-ssl-internal",    SecretName: "some-name-vault",    Secret: 
&v1.SecretVolumeSource{    SecurityContext: nil,    Selector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    Selector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    SelfLink: "",    ServiceAccountName: "default",    ServiceName: "some-name-proxysql-unready",    ServiceName: "some-name-pxc",    SetHostnameAsFQDN: nil,    Spec: v1.PersistentVolumeClaimSpec{    Spec: v1.PodSpec{    Spec: v1.StatefulSetSpec{    StartupProbe: nil,    Status: v1.PersistentVolumeClaimStatus{    Status: v1.StatefulSetStatus{    StorageClassName: nil,    Subdomain: "",    SuccessThreshold: 1,    Template: v1.PodTemplateSpec{    TerminationGracePeriodSeconds: &30,    TerminationGracePeriodSeconds: &600,    TerminationGracePeriodSeconds: nil,    TimeoutSeconds: 5,    Tolerations: nil,    TypeMeta: {},    TypeMeta: {Kind: "StatefulSet", APIVersion: "apps/v1"},    UpdateStrategy: {Type: "RollingUpdate", RollingUpdate: &{Partition: &0}},   &v1.StatefulSet{    ValueFrom: nil,    VolumeAttributesClassName: nil,    VolumeClaimTemplates: []v1.PersistentVolumeClaim{    VolumeDevices: nil,    VolumeMounts: {{Name: "datadir", MountPath: "/var/lib/mysql"}, {Name: "config", MountPath: "/etc/percona-xtradb-cluster.conf.d"}, {Name: "tmp", MountPath: "/tmp"}, {Name: "ssl", MountPath: "/etc/mysql/ssl"}, ...},    VolumeName: "",    VolumeSource: v1.VolumeSource{    Volumes: []v1.Volume{    VsphereVolume: nil,    WorkingDir: "", + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n proxysql-scheduler-20685 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.wzgDL3rw17 ++ mktemp + local LAST_ERR=/tmp/tmp.n1Rzp0dJKS + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.wzgDL3rw17 perconaxtradbcluster.pxc.percona.com "some-name" deleted from proxysql-scheduler-20685 namespace + cat /tmp/tmp.n1Rzp0dJKS + rm /tmp/tmp.wzgDL3rw17 /tmp/tmp.n1Rzp0dJKS + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.HWAz2QO2RT ++ mktemp + local LAST_ERR=/tmp/tmp.I7ZirQA9M0 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.HWAz2QO2RT No resources found + cat /tmp/tmp.I7ZirQA9M0 + rm /tmp/tmp.HWAz2QO2RT /tmp/tmp.I7ZirQA9M0 + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.lblvcTQbsF ++ mktemp + local LAST_ERR=/tmp/tmp.IU55tQQN3T + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lblvcTQbsF No resources found + cat /tmp/tmp.IU55tQQN3T + rm /tmp/tmp.lblvcTQbsF /tmp/tmp.IU55tQQN3T + return 0 + kubectl_bin delete ValidatingWebhookConfiguration 
percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.tmCiAUIlAu ++ mktemp + local LAST_ERR=/tmp/tmp.EeJkzF2cgU + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.tmCiAUIlAu validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.EeJkzF2cgU + rm /tmp/tmp.tmCiAUIlAu /tmp/tmp.EeJkzF2cgU + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.19.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace proxysql-scheduler-20685 + rm -rf /tmp/tmp.zoZKt1BYg2 + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.bCakuWz0WR ++ mktemp + local LAST_OUT=/tmp/tmp.PlqoUou5JJ ++ mktemp + local LAST_ERR=/tmp/tmp.ADXR1yVVbm + local exit_status=0 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.Q5XPoZ1Trt + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace proxysql-scheduler-20685 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator
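
Note on the kubectl_bin calls traced throughout this log: the repeated pattern (a mktemp pair feeding LAST_OUT/LAST_ERR, a 'seq 0 2' loop, set +e around the kubectl invocation, a break once exit_status is 0, then cat and rm of the temp files) corresponds to a retry wrapper roughly like the sketch below. This is a reconstruction from the trace, not the test suite's actual source; the back-off between attempts is an assumption, since every call captured in this section succeeded on the first try.

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        # up to three attempts, matching the 'seq 0 2' in the trace
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ "$exit_status" != 0 ]; then
                sleep "$i"  # assumed back-off; not observable in this log
            else
                break
            fi
        done
        # on success (or after the last attempt) surface captured output
        cat "$LAST_OUT"
        cat "$LAST_ERR"
        rm "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }

The cleanup above also strips finalizers before deleting the custom resources (kubectl patch pxc ... -p '{"metadata":{"finalizers":[]}}' fanned out over every namespace via xargs); without that step, a PerconaXtraDBCluster whose finalizer can no longer run would leave its namespace stuck in Terminating, which is why the patch precedes the bulk delete.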
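
Note on the "reconcile replication error ... no such host" records earlier in this section: the surrounding StatefulSet diffs show the pxc and proxysql replica counts moving between 5, 3, and 2, and the failed lookups of some-name-pxc-4 down to some-name-pxc-1 are consistent with a scale-down in progress: once a pod is torn down, its headless-service DNS record disappears while the replication reconciler is still probing it. A hypothetical spot-check of one such record is sketched below; the busybox image and the dns-check pod name are assumptions, while the namespace and pod FQDN are taken from the log (the log's lookup omits the svc.cluster.local suffix that the cluster's DNS search domains normally supply).

    # Expect NXDOMAIN for a pod removed by the scale-down, matching the
    # operator's "no such host" reconcile errors.
    kubectl -n proxysql-scheduler-20685 run dns-check --rm -i --restart=Never \
        --image=busybox:1.36 -- \
        nslookup some-name-pxc-4.some-name-pxc.proxysql-scheduler-20685.svc.cluster.local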