Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/logs/proxysql-scheduler-8-0.log
Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1
Warning: version difference between client (1.35) and server (1.32) exceeds the supported minor version skew of +/-1
+ cluster=some-name
+ create_infra proxysql-scheduler-16506
+ local ns=proxysql-scheduler-16506
+ '[' -n pxc-operator ']'
+ kubectl get pxc --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ kubectl patch pxc -n proxysql-scheduler-23384 some-name --type=merge -p '{"metadata":{"finalizers":[]}}'
perconaxtradbcluster.pxc.percona.com/some-name patched
+ kubectl_bin delete pxc --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.HLlvO6ehZ3
++ mktemp
+ local LAST_ERR=/tmp/tmp.NdlaIRQsvC
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.HLlvO6ehZ3
perconaxtradbcluster.pxc.percona.com "some-name" deleted from proxysql-scheduler-23384 namespace
+ cat /tmp/tmp.NdlaIRQsvC
+ rm /tmp/tmp.HLlvO6ehZ3 /tmp/tmp.NdlaIRQsvC
+ return 0
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.J8mLCW54fk
++ mktemp
+ local LAST_ERR=/tmp/tmp.ZdKBHwNcwl
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.J8mLCW54fk
No resources found
+ cat /tmp/tmp.ZdKBHwNcwl
+ rm /tmp/tmp.J8mLCW54fk /tmp/tmp.ZdKBHwNcwl
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.QXZpGAkbxy
++ mktemp
+ local LAST_ERR=/tmp/tmp.UPujdyd0hV
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.QXZpGAkbxy
No resources found
+ cat /tmp/tmp.UPujdyd0hV
+ rm /tmp/tmp.QXZpGAkbxy /tmp/tmp.UPujdyd0hV
+ return 0
+ create_namespace pxc-operator
+ local namespace=pxc-operator
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ tail -n1
++ awk '-F ' '{print $2}'
++ sed s/NAMESPACE//
++ helm list --all-namespaces --filter chaos-mesh
+ local chaos_mesh_ns=
+ '[' -n '' ']'
++ kubectl get MutatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ awk '{print $1}'
++ kubectl get ValidatingWebhookConfiguration
++ grep chaos-mesh
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ grep validate-auth
++ awk '{print $1}'
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl api-resources
++ kubectl get crd
++ grep chaos-mesh.org
++ awk '{print $1}'
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrolebinding
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ awk '{print $1}'
++ kubectl get clusterrole ++
grep chaos-mesh + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator ++ mktemp ++ mktemp + xargs kubectl delete ns + local LAST_OUT=/tmp/tmp.hOFccvgAD0 ++ mktemp + local LAST_OUT=/tmp/tmp.Z28HM1mc8W ++ mktemp + awk '{print$1}' + local LAST_ERR=/tmp/tmp.BT8IWsaOSF + local exit_status=0 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.3ud6xo784l + local exit_status=0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Z28HM1mc8W + cat /tmp/tmp.3ud6xo784l + rm /tmp/tmp.Z28HM1mc8W /tmp/tmp.3ud6xo784l + return 0 namespace "proxysql-scheduler-23384" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.hOFccvgAD0 namespace "pxc-operator" deleted + cat /tmp/tmp.BT8IWsaOSF + rm /tmp/tmp.hOFccvgAD0 /tmp/tmp.BT8IWsaOSF + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'waiting for namespace/pxc-operator to be deleted' waiting for namespace/pxc-operator to be deleted+ set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.IqwXhA07gL ++ mktemp + local LAST_ERR=/tmp/tmp.mJ74WdMwR7 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.IqwXhA07gL namespace/pxc-operator created + cat /tmp/tmp.mJ74WdMwR7 + rm /tmp/tmp.IqwXhA07gL /tmp/tmp.mJ74WdMwR7 + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ir3YPIAx6K +++ mktemp ++ local LAST_ERR=/tmp/tmp.sLjs3aRgwK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Ir3YPIAx6K ++ cat /tmp/tmp.sLjs3aRgwK ++ rm /tmp/tmp.Ir3YPIAx6K /tmp/tmp.sLjs3aRgwK ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2427-d4f7121d-1-cluster3 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.bMSLBcei8L ++ mktemp + local LAST_ERR=/tmp/tmp.6u20EpKwMd + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2427-d4f7121d-1-cluster3 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.bMSLBcei8L Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2427-d4f7121d-1-cluster3" 
modified. + cat /tmp/tmp.6u20EpKwMd + rm /tmp/tmp.bMSLBcei8L /tmp/tmp.6u20EpKwMd + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.IT51p8viBD ++ mktemp + local LAST_ERR=/tmp/tmp.uo9lSjlTHP + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.IT51p8viBD customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.uo9lSjlTHP + rm /tmp/tmp.IT51p8viBD /tmp/tmp.uo9lSjlTHP + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/deploy/cw-rbac.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.4sZrKmVw3e ++ mktemp + local LAST_ERR=/tmp/tmp.yF91otRy1o + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.4sZrKmVw3e clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.yF91otRy1o + rm /tmp/tmp.4sZrKmVw3e /tmp/tmp.yF91otRy1o + return 0 + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.oeAbz5O9cP ++ mktemp + local LAST_ERR=/tmp/tmp.a2fef9WW7j + local exit_status=0 ++ seq 0 2 + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "PXCO_FEATURE_GATES").value) = ""' - + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/deploy/cw-operator.yaml + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2427-d4f7121d^' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.oeAbz5O9cP deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.a2fef9WW7j + rm /tmp/tmp.oeAbz5O9cP /tmp/tmp.a2fef9WW7j + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ 
mktemp + local LAST_OUT=/tmp/tmp.SY2Zp2GWKI ++ mktemp + local LAST_ERR=/tmp/tmp.AL5xUElPjH + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.SY2Zp2GWKI pod/percona-xtradb-cluster-operator-5b5dc68764-z6tb7 condition met + cat /tmp/tmp.AL5xUElPjH + rm /tmp/tmp.SY2Zp2GWKI /tmp/tmp.AL5xUElPjH + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ jq -r '.items[] | select(.metadata.deletionTimestamp == null) | .metadata.name' ++ head -1 ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.M0rHwQlDjv +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ke8RtZJARg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.M0rHwQlDjv ++ cat /tmp/tmp.Ke8RtZJARg ++ rm /tmp/tmp.M0rHwQlDjv /tmp/tmp.Ke8RtZJARg ++ return 0 + wait_pod percona-xtradb-cluster-operator-5b5dc68764-z6tb7 480 pxc-operator + local pod=percona-xtradb-cluster-operator-5b5dc68764-z6tb7 + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-5b5dc68764-z6tb7 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-5b5dc68764-z6tb7 condition met waiting for pod/percona-xtradb-cluster-operator-5b5dc68764-z6tb7 to become Ready.Ok + sleep 3 + create_namespace proxysql-scheduler-16506 + local namespace=proxysql-scheduler-16506 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// ++ helm list --all-namespaces --filter chaos-mesh + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ grep chaos-mesh ++ kubectl get ValidatingWebhookConfiguration + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ grep validate-auth ++ kubectl get ValidatingWebhookConfiguration + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ awk '{print $1}' ++ grep chaos-mesh.org + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ kubectl get clusterrolebinding ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: 
resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + '[' -n '' ']' + desc 'cleaned up old namespaces proxysql-scheduler-16506' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces proxysql-scheduler-16506 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace proxysql-scheduler-16506 + grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.PEuSlxQbdK + local LAST_OUT=/tmp/tmp.RVEud47pH0 ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.SUvilg4f1s + local LAST_ERR=/tmp/tmp.9zFT8MBfyg + local exit_status=0 + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace proxysql-scheduler-16506 + awk '{print$1}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.PEuSlxQbdK + cat /tmp/tmp.SUvilg4f1s + rm /tmp/tmp.PEuSlxQbdK /tmp/tmp.SUvilg4f1s + return 0 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace proxysql-scheduler-16506 error: resource(s) were provided, but no name was specified + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace proxysql-scheduler-16506 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.RVEud47pH0 + cat /tmp/tmp.9zFT8MBfyg Error from server (NotFound): namespaces "proxysql-scheduler-16506" not found + rm /tmp/tmp.RVEud47pH0 /tmp/tmp.9zFT8MBfyg + return 1 + : + wait_for_delete namespace/proxysql-scheduler-16506 + local res=namespace/proxysql-scheduler-16506 + echo -n 'waiting for namespace/proxysql-scheduler-16506 to be deleted' waiting for namespace/proxysql-scheduler-16506 to be deleted+ set +o xtrace Error from server (NotFound): namespaces "proxysql-scheduler-16506" not found + desc 'create namespace proxysql-scheduler-16506' + set +o xtrace ----------------------------------------------------------------------------------- create namespace proxysql-scheduler-16506 ----------------------------------------------------------------------------------- + kubectl_bin create namespace proxysql-scheduler-16506 ++ mktemp + local LAST_OUT=/tmp/tmp.SIo05CMddw ++ mktemp + local LAST_ERR=/tmp/tmp.3F300xGPE0 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace proxysql-scheduler-16506 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.SIo05CMddw namespace/proxysql-scheduler-16506 created + cat /tmp/tmp.3F300xGPE0 + rm /tmp/tmp.SIo05CMddw /tmp/tmp.3F300xGPE0 + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.k40aVCfFJ8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.3V1xRf1XNF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.k40aVCfFJ8 ++ cat /tmp/tmp.3V1xRf1XNF ++ rm /tmp/tmp.k40aVCfFJ8 /tmp/tmp.3V1xRf1XNF ++ return 0 + kubectl_bin config set-context 
gke_cloud-dev-112233_us-central1-a_jen-pxc-2427-d4f7121d-1-cluster3 --namespace=proxysql-scheduler-16506 ++ mktemp + local LAST_OUT=/tmp/tmp.QEq8nw9OgK ++ mktemp + local LAST_ERR=/tmp/tmp.83zyILFmIo + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2427-d4f7121d-1-cluster3 --namespace=proxysql-scheduler-16506 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.QEq8nw9OgK Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2427-d4f7121d-1-cluster3" modified. + cat /tmp/tmp.83zyILFmIo + rm /tmp/tmp.QEq8nw9OgK /tmp/tmp.83zyILFmIo + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.BkBzn5u2b4 ++ mktemp + local LAST_ERR=/tmp/tmp.TtV81WZuFd + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BkBzn5u2b4 secret/minio-secret created secret/aws-s3-secret created secret/do-spaces-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.TtV81WZuFd + rm /tmp/tmp.BkBzn5u2b4 /tmp/tmp.TtV81WZuFd + return 0 + desc 'create PXC cluster: some-name' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster: some-name ----------------------------------------------------------------------------------- + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.8YW0Dt6MRJ ++ mktemp + local LAST_ERR=/tmp/tmp.cbaItBeMPD + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.8YW0Dt6MRJ secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.cbaItBeMPD + rm /tmp/tmp.8YW0Dt6MRJ /tmp/tmp.cbaItBeMPD + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/conf/client.yml + local 
config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/conf/client.yml + local pvc_name= + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/conf/client.yml '' + local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/conf/client.yml + local pvc_name= + kubectl_bin apply -f - + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2427-d4f7121d#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.proxysql-scheduler-16506~ + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_OUT=/tmp/tmp.WJLf5uGOCo ++ mktemp + local LAST_ERR=/tmp/tmp.mgyWfQi8wt + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.WJLf5uGOCo deployment.apps/pxc-client created + cat /tmp/tmp.mgyWfQi8wt + rm /tmp/tmp.WJLf5uGOCo /tmp/tmp.mgyWfQi8wt + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/conf/some-name.yml + local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/conf/some-name.yml + local pvc_name= + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/conf/some-name.yml '' + local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/conf/some-name.yml + local pvc_name= + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + local LAST_OUT=/tmp/tmp.B7IjymRhnM ++ mktemp + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + local LAST_ERR=/tmp/tmp.jvxct2PRI5 + local exit_status=0 + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2427-d4f7121d#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #' ++ seq 0 2 + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#' + /usr/bin/sed -e 
s~minio-service.#namespace~minio-service.proxysql-scheduler-16506~ + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.B7IjymRhnM perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.jvxct2PRI5 + rm /tmp/tmp.B7IjymRhnM /tmp/tmp.jvxct2PRI5 + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.jeJ2vKyecD ++++ mktemp +++ local LAST_ERR=/tmp/tmp.dFGle5volR +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.jeJ2vKyecD +++ cat /tmp/tmp.dFGle5volR +++ rm /tmp/tmp.jeJ2vKyecD /tmp/tmp.dFGle5volR +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.6FiUPpOaQn ++++ mktemp +++ local LAST_ERR=/tmp/tmp.ydTlm5aucm +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.6FiUPpOaQn +++ cat /tmp/tmp.ydTlm5aucm +++ rm /tmp/tmp.6FiUPpOaQn /tmp/tmp.ydTlm5aucm +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n proxysql-scheduler-16506 ++ mktemp + local LAST_OUT=/tmp/tmp.cdwh5MfK6U ++ mktemp + local LAST_ERR=/tmp/tmp.BEiSqChIze + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n proxysql-scheduler-16506 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n proxysql-scheduler-16506 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n proxysql-scheduler-16506 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.cdwh5MfK6U + cat /tmp/tmp.BEiSqChIze error: no matching resources found + rm /tmp/tmp.cdwh5MfK6U /tmp/tmp.BEiSqChIze + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace 
----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo some-name-proxysql-0 + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met waiting for pod/some-name-proxysql-0 to become Ready.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met waiting for pod/some-name-pxc-0 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo some-name-pxc-1 + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met waiting for pod/some-name-pxc-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met waiting for pod/some-name-pxc-2 to become Ready.Ok + sleep 10 ++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}' + local secret_name=my-cluster-secrets ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.EpNVci4c0g +++ mktemp ++ local LAST_ERR=/tmp/tmp.E4kDRscaaR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EpNVci4c0g ++ cat /tmp/tmp.E4kDRscaaR ++ rm /tmp/tmp.EpNVci4c0g /tmp/tmp.E4kDRscaaR ++ return 0 + local root_pass=root_password + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -p'\''root_password'\'' -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local 
LAST_OUT=/tmp/tmp.gBIu9l3DQr +++ mktemp ++ local LAST_ERR=/tmp/tmp.PKCrI0wv9k ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gBIu9l3DQr ++ cat /tmp/tmp.PKCrI0wv9k ++ rm /tmp/tmp.gBIu9l3DQr /tmp/tmp.PKCrI0wv9k ++ return 0 + client_pod=pxc-client-67fc4995bb-b5jpp + wait_pod pxc-client-67fc4995bb-b5jpp + local pod=pxc-client-67fc4995bb-b5jpp + local max_retry=480 + local ns= ++ echo pxc-client-67fc4995bb-b5jpp ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-67fc4995bb-b5jpp condition met waiting for pod/pxc-client-67fc4995bb-b5jpp to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -p'\''root_password'\'' -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TCaxz7nhYk +++ mktemp ++ local LAST_ERR=/tmp/tmp.2zDsZdAN7h ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TCaxz7nhYk ++ cat /tmp/tmp.2zDsZdAN7h ++ rm /tmp/tmp.TCaxz7nhYk /tmp/tmp.2zDsZdAN7h ++ return 0 + client_pod=pxc-client-67fc4995bb-b5jpp + wait_pod pxc-client-67fc4995bb-b5jpp + local pod=pxc-client-67fc4995bb-b5jpp + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' ++ echo pxc-client-67fc4995bb-b5jpp + local container= + set +o xtrace pod/pxc-client-67fc4995bb-b5jpp condition met waiting for pod/pxc-client-67fc4995bb-b5jpp to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9CZ6mMu554 +++ mktemp ++ local LAST_ERR=/tmp/tmp.tjfNHUiE62 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 
'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9CZ6mMu554 ++ cat /tmp/tmp.tjfNHUiE62 ++ rm /tmp/tmp.9CZ6mMu554 /tmp/tmp.tjfNHUiE62 ++ return 0 + client_pod=pxc-client-67fc4995bb-b5jpp + wait_pod pxc-client-67fc4995bb-b5jpp + local pod=pxc-client-67fc4995bb-b5jpp + local max_retry=480 + local ns= ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo pxc-client-67fc4995bb-b5jpp + local container= + set +o xtrace pod/pxc-client-67fc4995bb-b5jpp condition met waiting for pod/pxc-client-67fc4995bb-b5jpp to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.IHyuq6Rrjq/select-1.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.IHyuq6Rrjq/select-1.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/select-1.sql /tmp/tmp.IHyuq6Rrjq/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cGRBO3OO6z +++ mktemp ++ local LAST_ERR=/tmp/tmp.w1kbqtKddn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cGRBO3OO6z ++ cat /tmp/tmp.w1kbqtKddn ++ rm /tmp/tmp.cGRBO3OO6z /tmp/tmp.w1kbqtKddn ++ return 0 + client_pod=pxc-client-67fc4995bb-b5jpp + wait_pod pxc-client-67fc4995bb-b5jpp + local pod=pxc-client-67fc4995bb-b5jpp + local max_retry=480 + local ns= ++ echo pxc-client-67fc4995bb-b5jpp ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-67fc4995bb-b5jpp condition met waiting for pod/pxc-client-67fc4995bb-b5jpp to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.IHyuq6Rrjq/select-1.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.IHyuq6Rrjq/select-1.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/select-1.sql /tmp/tmp.IHyuq6Rrjq/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wAxA1yRQBC +++ mktemp ++ local LAST_ERR=/tmp/tmp.U1NNoyFJio ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wAxA1yRQBC ++ cat /tmp/tmp.U1NNoyFJio ++ rm /tmp/tmp.wAxA1yRQBC /tmp/tmp.U1NNoyFJio ++ return 0 + client_pod=pxc-client-67fc4995bb-b5jpp + wait_pod pxc-client-67fc4995bb-b5jpp + local pod=pxc-client-67fc4995bb-b5jpp + local max_retry=480 + local ns= ++ echo pxc-client-67fc4995bb-b5jpp ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-67fc4995bb-b5jpp condition met waiting for pod/pxc-client-67fc4995bb-b5jpp to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.IHyuq6Rrjq/select-1.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.IHyuq6Rrjq/select-1.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/select-1.sql /tmp/tmp.IHyuq6Rrjq/select-1.sql + is_keyring_plugin_in_use some-name + local cluster=some-name + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + kubectl exec some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' + grep -E -o 'early-plugin-load=keyring_\w+.so' + return 1 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.m3td8GFCNl +++ mktemp ++ local LAST_ERR=/tmp/tmp.9bbqss1YuH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.m3td8GFCNl ++ cat /tmp/tmp.9bbqss1YuH ++ rm /tmp/tmp.m3td8GFCNl /tmp/tmp.9bbqss1YuH ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AlqFHdEKAF +++ mktemp ++ local LAST_ERR=/tmp/tmp.GJL3P74Pvx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AlqFHdEKAF ++ cat /tmp/tmp.GJL3P74Pvx ++ rm /tmp/tmp.AlqFHdEKAF /tmp/tmp.GJL3P74Pvx ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.jR9e70lbmy ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.d0sxSu74J0 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.jR9e70lbmy +++++ cat /tmp/tmp.d0sxSu74J0 +++++ rm /tmp/tmp.jR9e70lbmy /tmp/tmp.d0sxSu74J0 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.CLvI3NB2MA ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.95zR9bqS47 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.CLvI3NB2MA +++++ cat /tmp/tmp.95zR9bqS47 +++++ rm /tmp/tmp.CLvI3NB2MA /tmp/tmp.95zR9bqS47 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.z3ourT2Vju +++ mktemp ++ local LAST_ERR=/tmp/tmp.3cx4KFYmbz ++ local exit_status=0 +++ 
seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.z3ourT2Vju ++ cat /tmp/tmp.3cx4KFYmbz ++ rm /tmp/tmp.z3ourT2Vju /tmp/tmp.3cx4KFYmbz ++ return 0 + [[ 2 == \2 ]] + echo + desc 'check proxysql custom config' + set +o xtrace ----------------------------------------------------------------------------------- check proxysql custom config ----------------------------------------------------------------------------------- + compare_proxysql_cfg some-name-proxysql-0 proxysql-cfg + local pod=some-name-proxysql-0 + local compare_file=proxysql-cfg + local 'query=SELECT variable_value from global_variables WHERE variable_name='\''mysql-poll_timeout'\''' + compare_mysql_cmd_local proxysql-cfg 'SELECT variable_value from global_variables WHERE variable_name='\''mysql-poll_timeout'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + local command_id=proxysql-cfg + local 'command=SELECT variable_value from global_variables WHERE variable_name='\''mysql-poll_timeout'\''' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/proxysql-cfg.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/proxysql-cfg-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT variable_value from global_variables WHERE variable_name='\''mysql-poll_timeout'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 '' + local 'command=SELECT variable_value from global_variables WHERE variable_name='\''mysql-poll_timeout'\''' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local container_name= + set +o xtrace + '[' '!' 
-s /tmp/tmp.IHyuq6Rrjq/proxysql-cfg.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/proxysql-cfg.sql /tmp/tmp.IHyuq6Rrjq/proxysql-cfg.sql + desc 'check if service and statefulset created with expected config' + set +o xtrace ----------------------------------------------------------------------------------- check if service and statefulset created with expected config ----------------------------------------------------------------------------------- + compare_kubectl statefulset/some-name-pxc + local resource=statefulset/some-name-pxc + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc.yml + local new_result=/tmp/tmp.IHyuq6Rrjq/statefulset_some-name-pxc.yml + desc 'compare statefulset/some-name-pxc-' + set +o xtrace ----------------------------------------------------------------------------------- compare statefulset/some-name-pxc- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 ++ echo '1.32 >= 1.33' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 ++ echo '1.32 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-k129.yml ']' + version_gt 1.27 ++ echo '1.32 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-k127.yml ']' + version_gt 1.24 ++ echo '1.32 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-k124.yml ']' + version_gt 1.22 ++ echo '1.32 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-k122.yml ']' + version_gt 1.21 ++ echo '1.32 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-oc.yml ']' + version_gt 1.29 ++ echo '1.32 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-aks.yml ']' + kubectl_bin get -o yaml statefulset/some-name-pxc ++ mktemp + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. 
| select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.spec.template.spec.containers[].env[] | select(.name == "XTRABACKUP_ENABLED")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.metadata.annotations."kubernetes.digitalocean.com/load-balancer-id") | del(.metadata.annotations."service.beta.kubernetes.io/do-loadbalancer-type") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. 
| select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("proxysql-scheduler-16506", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + local LAST_OUT=/tmp/tmp.xXgLVXLGOg ++ mktemp + local LAST_ERR=/tmp/tmp.HjyDOKi7Se + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml statefulset/some-name-pxc + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.xXgLVXLGOg + cat /tmp/tmp.HjyDOKi7Se + rm /tmp/tmp.xXgLVXLGOg /tmp/tmp.HjyDOKi7Se + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc.yml /tmp/tmp.IHyuq6Rrjq/statefulset_some-name-pxc.yml + log 'compare_kubectl: statefulset/some-name-pxc OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-04-08T13:08:02+0000]' compare_kubectl: statefulset/some-name-pxc OK [2026-04-08T13:08:02+0000] compare_kubectl: statefulset/some-name-pxc OK + compare_kubectl statefulset/some-name-proxysql + local resource=statefulset/some-name-proxysql + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql.yml + local new_result=/tmp/tmp.IHyuq6Rrjq/statefulset_some-name-proxysql.yml + desc 'compare statefulset/some-name-proxysql-' + set +o xtrace ----------------------------------------------------------------------------------- compare statefulset/some-name-proxysql- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 ++ bc -l ++ echo '1.32 >= 1.33' + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 ++ echo '1.32 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-k129.yml ']' + version_gt 1.27 ++ echo '1.32 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-k127.yml ']' + version_gt 1.24 ++ echo '1.32 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-k124.yml ']' + version_gt 1.22 ++ echo '1.32 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-k122.yml ']' + version_gt 1.21 ++ echo '1.32 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f 
/mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-oc.yml ']' + version_gt 1.29 ++ echo '1.32 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-aks.yml ']' + kubectl_bin get -o yaml statefulset/some-name-proxysql + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.spec.template.spec.containers[].env[] | select(.name == "XTRABACKUP_ENABLED")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.metadata.annotations."kubernetes.digitalocean.com/load-balancer-id") | del(.metadata.annotations."service.beta.kubernetes.io/do-loadbalancer-type") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. 
| select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("proxysql-scheduler-16506", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - ++ mktemp + local LAST_OUT=/tmp/tmp.LWSozSGJ1l ++ mktemp + local LAST_ERR=/tmp/tmp.PJgIPmLOhR + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml statefulset/some-name-proxysql + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LWSozSGJ1l + cat /tmp/tmp.PJgIPmLOhR + rm /tmp/tmp.LWSozSGJ1l /tmp/tmp.PJgIPmLOhR + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql.yml /tmp/tmp.IHyuq6Rrjq/statefulset_some-name-proxysql.yml + log 'compare_kubectl: statefulset/some-name-proxysql OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-04-08T13:08:03+0000]' compare_kubectl: statefulset/some-name-proxysql OK [2026-04-08T13:08:03+0000] compare_kubectl: statefulset/some-name-proxysql OK + compare_kubectl service/some-name-pxc + local resource=service/some-name-pxc + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc.yml + local new_result=/tmp/tmp.IHyuq6Rrjq/service_some-name-pxc.yml + desc 'compare service/some-name-pxc-' + set +o xtrace ----------------------------------------------------------------------------------- compare service/some-name-pxc- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 ++ echo '1.32 >= 1.33' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 ++ echo '1.32 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f 
/mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-k129.yml ']' + version_gt 1.27 ++ echo '1.32 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-k127.yml ']' + version_gt 1.24 ++ echo '1.32 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-k124.yml ']' + version_gt 1.22 ++ echo '1.32 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-k122.yml ']' + version_gt 1.21 ++ echo '1.32 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-oc.yml ']' + version_gt 1.29 ++ echo '1.32 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-aks.yml ']' + kubectl_bin get -o yaml service/some-name-pxc ++ mktemp + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.spec.template.spec.containers[].env[] | select(.name == "XTRABACKUP_ENABLED")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.metadata.annotations."kubernetes.digitalocean.com/load-balancer-id") | del(.metadata.annotations."service.beta.kubernetes.io/do-loadbalancer-type") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. 
| select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("proxysql-scheduler-16506", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. 
| select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + local LAST_OUT=/tmp/tmp.kL75I17viZ ++ mktemp + local LAST_ERR=/tmp/tmp.pdwgmhC6NF + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml service/some-name-pxc + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.kL75I17viZ + cat /tmp/tmp.pdwgmhC6NF + rm /tmp/tmp.kL75I17viZ /tmp/tmp.pdwgmhC6NF + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc.yml /tmp/tmp.IHyuq6Rrjq/service_some-name-pxc.yml + log 'compare_kubectl: service/some-name-pxc OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-04-08T13:08:04+0000]' compare_kubectl: service/some-name-pxc OK [2026-04-08T13:08:04+0000] compare_kubectl: service/some-name-pxc OK + compare_kubectl service/some-name-proxysql + local resource=service/some-name-proxysql + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql.yml + local new_result=/tmp/tmp.IHyuq6Rrjq/service_some-name-proxysql.yml + desc 'compare service/some-name-proxysql-' + set +o xtrace ----------------------------------------------------------------------------------- compare service/some-name-proxysql- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 ++ echo '1.32 >= 1.33' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 ++ echo '1.32 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-k129.yml ']' + version_gt 1.27 ++ echo '1.32 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-k127.yml ']' + version_gt 1.24 ++ echo '1.32 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-k124.yml ']' + version_gt 1.22 ++ echo '1.32 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-k122.yml ']' + version_gt 1.21 ++ echo '1.32 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-oc.yml ']' + version_gt 1.29 ++ echo '1.32 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' 
-z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-aks.yml ']' + kubectl_bin get -o yaml service/some-name-proxysql ++ mktemp + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.spec.template.spec.containers[].env[] | select(.name == "XTRABACKUP_ENABLED")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.metadata.annotations."kubernetes.digitalocean.com/load-balancer-id") | del(.metadata.annotations."service.beta.kubernetes.io/do-loadbalancer-type") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. 
| select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("proxysql-scheduler-16506", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + local LAST_OUT=/tmp/tmp.hbGdqTFwoy ++ mktemp + local LAST_ERR=/tmp/tmp.FnO5OoDdS5 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml service/some-name-proxysql + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.hbGdqTFwoy + cat /tmp/tmp.FnO5OoDdS5 + rm /tmp/tmp.hbGdqTFwoy /tmp/tmp.FnO5OoDdS5 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql.yml /tmp/tmp.IHyuq6Rrjq/service_some-name-proxysql.yml + log 'compare_kubectl: service/some-name-proxysql OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-04-08T13:08:05+0000]' compare_kubectl: service/some-name-proxysql OK [2026-04-08T13:08:05+0000] compare_kubectl: service/some-name-proxysql OK + compare_kubectl service/some-name-proxysql-unready + local resource=service/some-name-proxysql-unready + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready.yml + local new_result=/tmp/tmp.IHyuq6Rrjq/service_some-name-proxysql-unready.yml + desc 'compare service/some-name-proxysql-unready-' + set +o xtrace ----------------------------------------------------------------------------------- compare service/some-name-proxysql-unready- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 ++ echo '1.32 >= 1.33' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 ++ echo '1.32 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-k129.yml ']' + version_gt 1.27 ++ echo '1.32 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f 
/mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-k127.yml ']' + version_gt 1.24 ++ echo '1.32 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-k124.yml ']' + version_gt 1.22 ++ bc -l ++ echo '1.32 >= 1.22' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-k122.yml ']' + version_gt 1.21 ++ echo '1.32 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-oc.yml ']' + version_gt 1.29 ++ bc -l ++ echo '1.32 >= 1.29' + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-aks.yml ']' + kubectl_bin get -o yaml service/some-name-proxysql-unready + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.spec.template.spec.containers[].env[] | select(.name == "XTRABACKUP_ENABLED")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.metadata.annotations."kubernetes.digitalocean.com/load-balancer-id") | del(.metadata.annotations."service.beta.kubernetes.io/do-loadbalancer-type") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. 
| select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("proxysql-scheduler-16506", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. 
| select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - ++ mktemp + local LAST_OUT=/tmp/tmp.U6e6X3kPFL ++ mktemp + local LAST_ERR=/tmp/tmp.H869afFsY8 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml service/some-name-proxysql-unready + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.U6e6X3kPFL + cat /tmp/tmp.H869afFsY8 + rm /tmp/tmp.U6e6X3kPFL /tmp/tmp.H869afFsY8 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready.yml /tmp/tmp.IHyuq6Rrjq/service_some-name-proxysql-unready.yml + log 'compare_kubectl: service/some-name-proxysql-unready OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-04-08T13:08:06+0000]' compare_kubectl: service/some-name-proxysql-unready OK [2026-04-08T13:08:06+0000] compare_kubectl: service/some-name-proxysql-unready OK + sleep 120 + desc 'check if scheduler is enabled in all ProxySQL servers' + set +o xtrace ----------------------------------------------------------------------------------- check if scheduler is enabled in all ProxySQL servers ----------------------------------------------------------------------------------- + compare_scheduler some-name-proxysql-0 scheduler-0 + local pod=some-name-proxysql-0 + local compare_file=scheduler-0 + compare_mysql_cmd_local scheduler-0 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + local command_id=scheduler-0 + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/scheduler-0.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/scheduler-0-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 '' + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local container_name= + set +o xtrace + '[' '!' 
-s /tmp/tmp.IHyuq6Rrjq/scheduler-0.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/scheduler-0.sql /tmp/tmp.IHyuq6Rrjq/scheduler-0.sql + log 'scheduler is enabled in some-name-proxysql-0: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-04-08T13:10:08+0000]' scheduler is enabled in some-name-proxysql-0: OK [2026-04-08T13:10:08+0000] scheduler is enabled in some-name-proxysql-0: OK + compare_scheduler some-name-proxysql-1 scheduler-1 + local pod=some-name-proxysql-1 + local compare_file=scheduler-1 + compare_mysql_cmd_local scheduler-1 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + local command_id=scheduler-1 + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/scheduler-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/scheduler-1-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 '' + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local container_name= + set +o xtrace + '[' '!' -s /tmp/tmp.IHyuq6Rrjq/scheduler-1.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/scheduler-1.sql /tmp/tmp.IHyuq6Rrjq/scheduler-1.sql + log 'scheduler is enabled in some-name-proxysql-1: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-04-08T13:10:10+0000]' scheduler is enabled in some-name-proxysql-1: OK [2026-04-08T13:10:10+0000] scheduler is enabled in some-name-proxysql-1: OK + desc 'check if scheduler is doing its job in all ProxySQL servers' + set +o xtrace ----------------------------------------------------------------------------------- check if scheduler is doing its job in all ProxySQL servers ----------------------------------------------------------------------------------- + compare_mysql_servers some-name-proxysql-0 mysql-servers-0 + local pod=some-name-proxysql-0 + local compare_file=mysql-servers-0 + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-0' check runtime_mysql_servers in some-name-proxysql-0+ compare_mysql_cmd_local mysql-servers-0 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + echo + log 'mysql_servers are configured in some-name-proxysql-0: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-04-08T13:10:12+0000]' mysql_servers are configured in some-name-proxysql-0: OK [2026-04-08T13:10:12+0000] mysql_servers are configured in some-name-proxysql-0: OK + compare_mysql_servers some-name-proxysql-1 mysql-servers-1 + local pod=some-name-proxysql-1 + local compare_file=mysql-servers-1 + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 
16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-1' check runtime_mysql_servers in some-name-proxysql-1+ compare_mysql_cmd_local mysql-servers-1 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + echo + log 'mysql_servers are configured in some-name-proxysql-1: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-04-08T13:10:13+0000]' mysql_servers are configured in some-name-proxysql-1: OK [2026-04-08T13:10:13+0000] mysql_servers are configured in some-name-proxysql-1: OK + desc 'check disabling scheduler' + set +o xtrace ----------------------------------------------------------------------------------- check disabling scheduler ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge -p '{"spec": {"proxysql": {"scheduler": {"enabled": false}}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Zm0KBzoACB ++ mktemp + local LAST_ERR=/tmp/tmp.ayDnoKYxMH + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge -p '{"spec": {"proxysql": {"scheduler": {"enabled": false}}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Zm0KBzoACB perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.ayDnoKYxMH + rm /tmp/tmp.Zm0KBzoACB /tmp/tmp.ayDnoKYxMH + return 0 + sleep_with_log 90 + local d=90 + log 'sleeping for 90 seconds' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-04-08T13:10:15+0000]' sleeping for 90 seconds [2026-04-08T13:10:15+0000] sleeping for 90 seconds + sleep 90 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YSSCgsz7Wz +++ mktemp ++ local LAST_ERR=/tmp/tmp.kvfhAE6TOA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YSSCgsz7Wz ++ cat /tmp/tmp.kvfhAE6TOA ++ rm /tmp/tmp.YSSCgsz7Wz /tmp/tmp.kvfhAE6TOA ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mCUn3WGdB3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.BW7yM6nK2r ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mCUn3WGdB3 ++ cat /tmp/tmp.BW7yM6nK2r ++ rm /tmp/tmp.mCUn3WGdB3 /tmp/tmp.BW7yM6nK2r ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.vG3MXNsMAy ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.kEmgzntvTI +++++ local 
exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.vG3MXNsMAy +++++ cat /tmp/tmp.kEmgzntvTI +++++ rm /tmp/tmp.vG3MXNsMAy /tmp/tmp.kEmgzntvTI +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.phTadoTFnI ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.RAe0wpn2fr +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.phTadoTFnI +++++ cat /tmp/tmp.RAe0wpn2fr +++++ rm /tmp/tmp.phTadoTFnI /tmp/tmp.RAe0wpn2fr +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Y7oTHiLT2l +++ mktemp ++ local LAST_ERR=/tmp/tmp.FFXSLMwIKw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Y7oTHiLT2l ++ cat /tmp/tmp.FFXSLMwIKw ++ rm /tmp/tmp.Y7oTHiLT2l /tmp/tmp.FFXSLMwIKw ++ return 0 + [[ 2 == \2 ]] + echo + compare_scheduler some-name-proxysql-0 scheduler-0-disabled + local pod=some-name-proxysql-0 + local compare_file=scheduler-0-disabled + compare_mysql_cmd_local scheduler-0-disabled 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + local command_id=scheduler-0-disabled + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/scheduler-0-disabled.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/scheduler-0-disabled-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 '' + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local container_name= + set +o xtrace + '[' '!' 
-s /tmp/tmp.IHyuq6Rrjq/scheduler-0-disabled.sql ']' + sleep 20 + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local container_name= + set +o xtrace + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/scheduler-0-disabled.sql /tmp/tmp.IHyuq6Rrjq/scheduler-0-disabled.sql + log 'scheduler is disabled in some-name-proxysql-0: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-04-08T13:12:21+0000]' scheduler is disabled in some-name-proxysql-0: OK [2026-04-08T13:12:21+0000] scheduler is disabled in some-name-proxysql-0: OK + compare_scheduler some-name-proxysql-1 scheduler-1-disabled + local pod=some-name-proxysql-1 + local compare_file=scheduler-1-disabled + compare_mysql_cmd_local scheduler-1-disabled 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + local command_id=scheduler-1-disabled + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/scheduler-1-disabled.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/scheduler-1-disabled-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 '' + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local container_name= + set +o xtrace + '[' '!' 
-s /tmp/tmp.IHyuq6Rrjq/scheduler-1-disabled.sql ']' + sleep 20 + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local container_name= + set +o xtrace + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/scheduler-1-disabled.sql /tmp/tmp.IHyuq6Rrjq/scheduler-1-disabled.sql + log 'scheduler is disabled in some-name-proxysql-1: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-04-08T13:12:44+0000]' scheduler is disabled in some-name-proxysql-1: OK [2026-04-08T13:12:44+0000] scheduler is disabled in some-name-proxysql-1: OK + kubectl get pod NAME READY STATUS RESTARTS AGE pxc-client-67fc4995bb-b5jpp 2/2 Running 0 10m some-name-proxysql-0 3/3 Running 0 2m13s some-name-proxysql-1 3/3 Running 0 2m28s some-name-pxc-0 1/1 Running 0 10m some-name-pxc-1 1/1 Running 0 9m5s some-name-pxc-2 1/1 Running 0 7m45s + kubectl get pxc NAME ENDPOINT STATUS PXC PROXYSQL HAPROXY AGE some-name some-name-proxysql.proxysql-scheduler-16506 ready 3 2 10m + desc 'check enabling scheduler' + set +o xtrace ----------------------------------------------------------------------------------- check enabling scheduler ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge -p '{"spec": {"proxysql": {"scheduler": {"enabled": true}}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.9GxcWFtgnw ++ mktemp + local LAST_ERR=/tmp/tmp.vj9DUSSP93 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge -p '{"spec": {"proxysql": {"scheduler": {"enabled": true}}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.9GxcWFtgnw perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.vj9DUSSP93 + rm /tmp/tmp.9GxcWFtgnw /tmp/tmp.vj9DUSSP93 + return 0 + sleep_with_log 90 + local d=90 + log 'sleeping for 90 seconds' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-04-08T13:12:48+0000]' sleeping for 90 seconds [2026-04-08T13:12:48+0000] sleeping for 90 seconds + sleep 90 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6DnBA8Mi3E +++ mktemp ++ local LAST_ERR=/tmp/tmp.dlOwkf8bRB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6DnBA8Mi3E ++ cat /tmp/tmp.dlOwkf8bRB ++ rm /tmp/tmp.6DnBA8Mi3E /tmp/tmp.dlOwkf8bRB ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NpQSyba2IJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.i4PtlWO7hf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' 
++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NpQSyba2IJ ++ cat /tmp/tmp.i4PtlWO7hf ++ rm /tmp/tmp.NpQSyba2IJ /tmp/tmp.i4PtlWO7hf ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.RtVdrJtdkV ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.YoBABgBrCj +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.RtVdrJtdkV +++++ cat /tmp/tmp.YoBABgBrCj +++++ rm /tmp/tmp.RtVdrJtdkV /tmp/tmp.YoBABgBrCj +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.KrfZnlTnVJ ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.UfFwiNLjLv +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.KrfZnlTnVJ +++++ cat /tmp/tmp.UfFwiNLjLv +++++ rm /tmp/tmp.KrfZnlTnVJ /tmp/tmp.UfFwiNLjLv +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YsBpmlmH3Z +++ mktemp ++ local LAST_ERR=/tmp/tmp.qSXl3kXLlw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YsBpmlmH3Z ++ cat /tmp/tmp.qSXl3kXLlw ++ rm /tmp/tmp.YsBpmlmH3Z /tmp/tmp.qSXl3kXLlw ++ return 0 + [[ 2 == \2 ]] + echo + compare_scheduler some-name-proxysql-0 scheduler-0 + local pod=some-name-proxysql-0 + local compare_file=scheduler-0 + compare_mysql_cmd_local scheduler-0 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + local command_id=scheduler-0 + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/scheduler-0.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/scheduler-0-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 '' + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local container_name= + set +o xtrace + '[' '!' 
-s /tmp/tmp.IHyuq6Rrjq/scheduler-0.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/scheduler-0.sql /tmp/tmp.IHyuq6Rrjq/scheduler-0.sql + log 'scheduler is enabled in some-name-proxysql-0: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-04-08T13:14:30+0000]' scheduler is enabled in some-name-proxysql-0: OK [2026-04-08T13:14:30+0000] scheduler is enabled in some-name-proxysql-0: OK + compare_scheduler some-name-proxysql-1 scheduler-1 + local pod=some-name-proxysql-1 + local compare_file=scheduler-1 + compare_mysql_cmd_local scheduler-1 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + local command_id=scheduler-1 + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/scheduler-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/scheduler-1-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 '' + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local container_name= + set +o xtrace + '[' '!' -s /tmp/tmp.IHyuq6Rrjq/scheduler-1.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/scheduler-1.sql /tmp/tmp.IHyuq6Rrjq/scheduler-1.sql + log 'scheduler is enabled in some-name-proxysql-1: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-04-08T13:14:32+0000]' scheduler is enabled in some-name-proxysql-1: OK [2026-04-08T13:14:32+0000] scheduler is enabled in some-name-proxysql-1: OK + desc 'check PXC pod 1 is promoted to writer when pod-0 is down' + set +o xtrace ----------------------------------------------------------------------------------- check PXC pod 1 is promoted to writer when pod-0 is down ----------------------------------------------------------------------------------- + LOOP_PID=30056 + echo LOOP_PID=30056 LOOP_PID=30056 + sleep 10 + for i in '{1..20}' + kubectl delete pod some-name-pxc-0 pod "some-name-pxc-0" deleted from proxysql-scheduler-16506 namespace + log 'waiting for pod0 to be removed from proxysql' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-04-08T13:14:42+0000]' waiting for pod0 to be removed from proxysql [2026-04-08T13:14:42+0000] waiting for pod0 to be removed from proxysql + compare_mysql_servers some-name-proxysql-0 mysql-servers-0-pod0-down 30056 + local pod=some-name-proxysql-0 + local compare_file=mysql-servers-0-pod0-down + local loop_pid=30056 + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-0' check runtime_mysql_servers in some-name-proxysql-0+ compare_mysql_cmd_local mysql-servers-0-pod0-down 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + echo + log 'mysql_servers 
are configured in some-name-proxysql-0 when pxc-0 is down: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-04-08T13:14:44+0000]' mysql_servers are configured in some-name-proxysql-0 when pxc-0 is down: OK [2026-04-08T13:14:44+0000] mysql_servers are configured in some-name-proxysql-0 when pxc-0 is down: OK + compare_mysql_servers some-name-proxysql-1 mysql-servers-1-pod0-down 30056 + local pod=some-name-proxysql-1 + local compare_file=mysql-servers-1-pod0-down + local loop_pid=30056 + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-1' check runtime_mysql_servers in some-name-proxysql-1+ compare_mysql_cmd_local mysql-servers-1-pod0-down 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + echo + log 'mysql_servers are configured in some-name-proxysql-1 when pxc-0 is down: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-04-08T13:14:45+0000]' mysql_servers are configured in some-name-proxysql-1 when pxc-0 is down: OK [2026-04-08T13:14:45+0000] mysql_servers are configured in some-name-proxysql-1 when pxc-0 is down: OK + kill 30056 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/../functions: line 152: 30056 Terminated ( for i in {1..20}; do kubectl delete pod "${cluster}-pxc-0"; sleep 3; done ) + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VPFIxnMZwR +++ mktemp ++ local LAST_ERR=/tmp/tmp.h7q3GX3MGR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VPFIxnMZwR ++ cat /tmp/tmp.h7q3GX3MGR ++ rm /tmp/tmp.VPFIxnMZwR /tmp/tmp.h7q3GX3MGR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KNRzjmW7Yg +++ mktemp ++ local LAST_ERR=/tmp/tmp.FdPbFlgLEJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KNRzjmW7Yg ++ cat /tmp/tmp.FdPbFlgLEJ ++ rm /tmp/tmp.KNRzjmW7Yg /tmp/tmp.FdPbFlgLEJ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eSNPAiizet +++ mktemp ++ local LAST_ERR=/tmp/tmp.Cay7JyrIgr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eSNPAiizet ++ cat /tmp/tmp.Cay7JyrIgr ++ rm /tmp/tmp.eSNPAiizet /tmp/tmp.Cay7JyrIgr ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oti9Zw48zR +++ mktemp ++ local LAST_ERR=/tmp/tmp.Jf9wmMtXse ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oti9Zw48zR ++ cat /tmp/tmp.Jf9wmMtXse ++ rm /tmp/tmp.oti9Zw48zR /tmp/tmp.Jf9wmMtXse ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GT16gnJOpT +++ mktemp ++ local LAST_ERR=/tmp/tmp.78awmFnYHF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GT16gnJOpT ++ cat /tmp/tmp.78awmFnYHF ++ rm /tmp/tmp.GT16gnJOpT /tmp/tmp.78awmFnYHF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9j6iVU4Pr5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.CgDg6a0tCO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9j6iVU4Pr5 ++ cat /tmp/tmp.CgDg6a0tCO ++ rm /tmp/tmp.9j6iVU4Pr5 /tmp/tmp.CgDg6a0tCO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 5 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8ciNpzq16W +++ mktemp ++ local LAST_ERR=/tmp/tmp.zmCLZ1Yciu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8ciNpzq16W ++ cat /tmp/tmp.zmCLZ1Yciu ++ rm /tmp/tmp.8ciNpzq16W /tmp/tmp.zmCLZ1Yciu ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GzWwb4lcTq +++ mktemp ++ local LAST_ERR=/tmp/tmp.ryFIxNpUh7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GzWwb4lcTq ++ cat /tmp/tmp.ryFIxNpUh7 ++ rm /tmp/tmp.GzWwb4lcTq /tmp/tmp.ryFIxNpUh7 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.es5lL4otLn ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.6tZCAC9fIw +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.es5lL4otLn +++++ cat /tmp/tmp.6tZCAC9fIw +++++ rm /tmp/tmp.es5lL4otLn /tmp/tmp.6tZCAC9fIw +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.qENfffEAkc ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.MLIlANlGWP +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.qENfffEAkc +++++ cat /tmp/tmp.MLIlANlGWP +++++ rm /tmp/tmp.qENfffEAkc /tmp/tmp.MLIlANlGWP +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7ZTkqaZXaL +++ mktemp ++ local LAST_ERR=/tmp/tmp.vI4jY61HJW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7ZTkqaZXaL ++ cat /tmp/tmp.vI4jY61HJW ++ rm /tmp/tmp.7ZTkqaZXaL /tmp/tmp.vI4jY61HJW ++ return 0 + [[ 2 == \2 ]] + echo + desc 'check scaling PXC up to 5 replicas' + set +o xtrace ----------------------------------------------------------------------------------- check scaling PXC up to 5 replicas ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge -p '{"spec": {"pxc": {"size": 5}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.CZuxNiv5vD ++ mktemp + local LAST_ERR=/tmp/tmp.xEUVZ2RHma + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge -p '{"spec": {"pxc": {"size": 5}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.CZuxNiv5vD 
perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.xEUVZ2RHma + rm /tmp/tmp.CZuxNiv5vD /tmp/tmp.xEUVZ2RHma + return 0 + wait_pod some-name-pxc-3 + local pod=some-name-pxc-3 + local max_retry=480 + local ns= ++ echo some-name-pxc-3 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-3 condition met waiting for pod/some-name-pxc-3 to become Ready.Ok + wait_pod some-name-pxc-4 + local pod=some-name-pxc-4 + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' ++ echo some-name-pxc-4 + local container=pxc + set +o xtrace pod/some-name-pxc-4 condition met waiting for pod/some-name-pxc-4 to become Ready.Ok + sleep 120 + compare_mysql_servers some-name-proxysql-0 mysql-servers-0-1 + local pod=some-name-proxysql-0 + local compare_file=mysql-servers-0-1 + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-0' check runtime_mysql_servers in some-name-proxysql-0+ compare_mysql_cmd_local mysql-servers-0-1 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + echo + log 'new mysql_servers are configured in some-name-proxysql-0: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-04-08T13:20:26+0000]' new mysql_servers are configured in some-name-proxysql-0: OK [2026-04-08T13:20:26+0000] new mysql_servers are configured in some-name-proxysql-0: OK + compare_mysql_servers some-name-proxysql-1 mysql-servers-1-1 + local pod=some-name-proxysql-1 + local compare_file=mysql-servers-1-1 + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-1' check runtime_mysql_servers in some-name-proxysql-1+ compare_mysql_cmd_local mysql-servers-1-1 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + echo + log 'new mysql_servers are configured in some-name-proxysql-1: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-04-08T13:20:27+0000]' new mysql_servers are configured in some-name-proxysql-1: OK [2026-04-08T13:20:27+0000] new mysql_servers are configured in some-name-proxysql-1: OK + desc 'check scaling ProxySQL up' + set +o xtrace ----------------------------------------------------------------------------------- check scaling ProxySQL up ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge -p '{"spec": {"proxysql": {"size": 3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Uypp4hCppa ++ mktemp + local LAST_ERR=/tmp/tmp.bWVEVaJ2BM + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge -p '{"spec": {"proxysql": {"size": 3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Uypp4hCppa perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.bWVEVaJ2BM + rm /tmp/tmp.Uypp4hCppa /tmp/tmp.bWVEVaJ2BM + return 0 + wait_pod some-name-proxysql-2 + local pod=some-name-proxysql-2 + local max_retry=480 + local ns= 
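compare_mysql_servers runs the ONLINE-servers query against the ProxySQL admin interface inside each proxysql pod and diffs the output against a checked-in expectation file. A minimal equivalent, assuming the mysql client is available in the proxysql container and using the admin credentials shown in the trace, would be:

    query="SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='ONLINE'"
    kubectl exec some-name-proxysql-0 -c proxysql -- \
        mysql -h127.0.0.1 -P6032 -uproxyadmin -padmin_password -NBe "$query"

The scheduler check that follows on the newly added pod uses the same pattern (SELECT * FROM runtime_scheduler; diffed against compare/scheduler-2.sql, with per-version variants such as scheduler-2-80.sql picked up when they exist).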
++ echo some-name-proxysql-2 ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=proxysql + set +o xtrace pod/some-name-proxysql-2 condition met waiting for pod/some-name-proxysql-2 to become Ready.Ok + sleep 120 + compare_scheduler some-name-proxysql-2 scheduler-2 + local pod=some-name-proxysql-2 + local compare_file=scheduler-2 + compare_mysql_cmd_local scheduler-2 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-2 + local command_id=scheduler-2 + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-2 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/scheduler-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/scheduler-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-2 '' + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-2 + local container_name= + set +o xtrace + '[' '!' -s /tmp/tmp.IHyuq6Rrjq/scheduler-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2427/e2e-tests/proxysql-scheduler/compare/scheduler-2.sql /tmp/tmp.IHyuq6Rrjq/scheduler-2.sql + log 'scheduler is enabled in some-name-proxysql-2: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-04-08T13:22:56+0000]' scheduler is enabled in some-name-proxysql-2: OK [2026-04-08T13:22:56+0000] scheduler is enabled in some-name-proxysql-2: OK + compare_mysql_servers some-name-proxysql-2 mysql-servers-2 + local pod=some-name-proxysql-2 + local compare_file=mysql-servers-2 + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-2' check runtime_mysql_servers in some-name-proxysql-2+ compare_mysql_cmd_local mysql-servers-2 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-2 + echo + log 'mysql_servers are configured in some-name-proxysql-2: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-04-08T13:22:58+0000]' mysql_servers are configured in some-name-proxysql-2: OK [2026-04-08T13:22:58+0000] mysql_servers are configured in some-name-proxysql-2: OK + desc 'check writerIsAlsoReader = false' + set +o xtrace ----------------------------------------------------------------------------------- check writerIsAlsoReader = false ----------------------------------------------------------------------------------- + kubectl patch pxc some-name --type=merge -p '{"spec": {"proxysql": {"scheduler": {"writerIsAlsoReader": false}}}}' perconaxtradbcluster.pxc.percona.com/some-name patched + sleep 10 + wait_cluster_consistency some-name 5 3 + local cluster_name=some-name + local cluster_size=5 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster 
consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aTqxnpSqNf +++ mktemp ++ local LAST_ERR=/tmp/tmp.qNcyvZU0Iu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aTqxnpSqNf ++ cat /tmp/tmp.qNcyvZU0Iu ++ rm /tmp/tmp.aTqxnpSqNf /tmp/tmp.qNcyvZU0Iu ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.a9O5IoeVP8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.vhsAumBljL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.a9O5IoeVP8 ++ cat /tmp/tmp.vhsAumBljL ++ rm /tmp/tmp.a9O5IoeVP8 /tmp/tmp.vhsAumBljL ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MbXHkayNb6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.6GE0a88AcW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MbXHkayNb6 ++ cat /tmp/tmp.6GE0a88AcW ++ rm /tmp/tmp.MbXHkayNb6 /tmp/tmp.6GE0a88AcW ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CQTvQCfh8X +++ mktemp ++ local LAST_ERR=/tmp/tmp.pU3fsePkWM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CQTvQCfh8X ++ cat /tmp/tmp.pU3fsePkWM ++ rm /tmp/tmp.CQTvQCfh8X /tmp/tmp.pU3fsePkWM ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yXZZUJQL0C +++ mktemp ++ local LAST_ERR=/tmp/tmp.rbUGsCS8pZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yXZZUJQL0C ++ cat /tmp/tmp.rbUGsCS8pZ ++ rm /tmp/tmp.yXZZUJQL0C /tmp/tmp.rbUGsCS8pZ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
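The wait in progress here follows the writerIsAlsoReader change, which is applied as a plain merge patch on the scheduler section of the ProxySQL spec, as issued in the trace above:

    kubectl patch pxc some-name --type=merge \
        -p '{"spec": {"proxysql": {"scheduler": {"writerIsAlsoReader": false}}}}'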
.+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QvEsboVf0u +++ mktemp ++ local LAST_ERR=/tmp/tmp.S8j60juBed ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QvEsboVf0u ++ cat /tmp/tmp.S8j60juBed ++ rm /tmp/tmp.QvEsboVf0u /tmp/tmp.S8j60juBed ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eL0sTWbBdz +++ mktemp ++ local LAST_ERR=/tmp/tmp.m6xrN9vuNb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eL0sTWbBdz ++ cat /tmp/tmp.m6xrN9vuNb ++ rm /tmp/tmp.eL0sTWbBdz /tmp/tmp.m6xrN9vuNb ++ return 0 + [[ 5 == \5 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.RFhln70JBZ ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.IMMlCwmKnE +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.RFhln70JBZ +++++ cat /tmp/tmp.IMMlCwmKnE +++++ rm /tmp/tmp.RFhln70JBZ /tmp/tmp.IMMlCwmKnE +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.CGGOIEOStX ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.8kb7gQHMCk +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.CGGOIEOStX +++++ cat /tmp/tmp.8kb7gQHMCk +++++ rm /tmp/tmp.CGGOIEOStX /tmp/tmp.8kb7gQHMCk +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XTcyutFlDQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.09C29NN311 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XTcyutFlDQ ++ cat /tmp/tmp.09C29NN311 ++ rm /tmp/tmp.XTcyutFlDQ /tmp/tmp.09C29NN311 ++ return 0 + [[ 3 == \3 ]] + echo + sleep 60 + compare_mysql_servers some-name-proxysql-0 mysql-servers-0-1-writerNotReader + local pod=some-name-proxysql-0 + local compare_file=mysql-servers-0-1-writerNotReader + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-0' check runtime_mysql_servers in some-name-proxysql-0+ compare_mysql_cmd_local mysql-servers-0-1-writerNotReader 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 
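Judging by their names, the mysql-servers-*-writerNotReader expectation files used in the comparisons that follow encode the intended effect of that setting: the node acting as writer should no longer appear among the readers. To eyeball the hostgroup membership directly (an illustrative query, not one of the test's own compare steps):

    kubectl exec some-name-proxysql-0 -c proxysql -- \
        mysql -h127.0.0.1 -P6032 -uproxyadmin -padmin_password -e \
        "SELECT hostgroup_id, hostname, status FROM runtime_mysql_servers ORDER BY hostgroup_id"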
+ echo + log 'new mysql_servers are configured in some-name-proxysql-0: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-04-08T13:24:55+0000]' new mysql_servers are configured in some-name-proxysql-0: OK [2026-04-08T13:24:55+0000] new mysql_servers are configured in some-name-proxysql-0: OK + compare_mysql_servers some-name-proxysql-1 mysql-servers-1-1-writerNotReader + local pod=some-name-proxysql-1 + local compare_file=mysql-servers-1-1-writerNotReader + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-1' check runtime_mysql_servers in some-name-proxysql-1+ compare_mysql_cmd_local mysql-servers-1-1-writerNotReader 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + echo + log 'new mysql_servers are configured in some-name-proxysql-1: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-04-08T13:24:56+0000]' new mysql_servers are configured in some-name-proxysql-1: OK [2026-04-08T13:24:56+0000] new mysql_servers are configured in some-name-proxysql-1: OK + compare_mysql_servers some-name-proxysql-2 mysql-servers-2-writerNotReader + local pod=some-name-proxysql-2 + local compare_file=mysql-servers-2-writerNotReader + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-2' check runtime_mysql_servers in some-name-proxysql-2+ compare_mysql_cmd_local mysql-servers-2-writerNotReader 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-2 + echo + log 'mysql_servers are configured in some-name-proxysql-2: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-04-08T13:24:58+0000]' mysql_servers are configured in some-name-proxysql-2: OK [2026-04-08T13:24:58+0000] mysql_servers are configured in some-name-proxysql-2: OK + desc 'check scaling PXC down to 3 replicas' + set +o xtrace ----------------------------------------------------------------------------------- check scaling PXC down to 3 replicas ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge -p '{"spec": {"pxc": {"size": 3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.hdfEqec4qN ++ mktemp + local LAST_ERR=/tmp/tmp.O6XvSbDfSR + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge -p '{"spec": {"pxc": {"size": 3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.hdfEqec4qN perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.O6XvSbDfSR + rm /tmp/tmp.hdfEqec4qN /tmp/tmp.O6XvSbDfSR + return 0 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ 
kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9odSUCdL7C +++ mktemp ++ local LAST_ERR=/tmp/tmp.zeDBxU3bc2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9odSUCdL7C ++ cat /tmp/tmp.zeDBxU3bc2 ++ rm /tmp/tmp.9odSUCdL7C /tmp/tmp.zeDBxU3bc2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.t4ZwICCa1U +++ mktemp ++ local LAST_ERR=/tmp/tmp.xSQoMKhEgd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.t4ZwICCa1U ++ cat /tmp/tmp.xSQoMKhEgd ++ rm /tmp/tmp.t4ZwICCa1U /tmp/tmp.xSQoMKhEgd ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.89Np3HMqon +++ mktemp ++ local LAST_ERR=/tmp/tmp.f9msYtalnp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.89Np3HMqon ++ cat /tmp/tmp.f9msYtalnp ++ rm /tmp/tmp.89Np3HMqon /tmp/tmp.f9msYtalnp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WBvSYZfe3P +++ mktemp ++ local LAST_ERR=/tmp/tmp.H7q5fo4VcP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WBvSYZfe3P ++ cat /tmp/tmp.H7q5fo4VcP ++ rm /tmp/tmp.WBvSYZfe3P /tmp/tmp.H7q5fo4VcP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OJ0Bya1IVW +++ mktemp ++ local LAST_ERR=/tmp/tmp.NyUGvTlzOT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OJ0Bya1IVW ++ cat /tmp/tmp.NyUGvTlzOT ++ rm /tmp/tmp.OJ0Bya1IVW /tmp/tmp.NyUGvTlzOT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
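This wait follows the scale-down patch back to three PXC members. Besides polling the custom resource status, the same convergence can be watched on the StatefulSet itself (an illustrative check, not part of the test's own helpers):

    kubectl patch pxc some-name --type=merge -p '{"spec": {"pxc": {"size": 3}}}'   # as issued above
    kubectl get sts some-name-pxc -o jsonpath='{.spec.replicas} {.status.readyReplicas}{"\n"}'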
.+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cTTBkDx8wH +++ mktemp ++ local LAST_ERR=/tmp/tmp.aEwtMrX58O ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cTTBkDx8wH ++ cat /tmp/tmp.aEwtMrX58O ++ rm /tmp/tmp.cTTBkDx8wH /tmp/tmp.aEwtMrX58O ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YaknFsJxVc +++ mktemp ++ local LAST_ERR=/tmp/tmp.RXzgIidbp2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YaknFsJxVc ++ cat /tmp/tmp.RXzgIidbp2 ++ rm /tmp/tmp.YaknFsJxVc /tmp/tmp.RXzgIidbp2 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.y7FxUJuU8D ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.071lAoK7ka +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.y7FxUJuU8D +++++ cat /tmp/tmp.071lAoK7ka +++++ rm /tmp/tmp.y7FxUJuU8D /tmp/tmp.071lAoK7ka +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.S8LpJusLvi ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.SPAgu2I5on +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.S8LpJusLvi +++++ cat /tmp/tmp.SPAgu2I5on +++++ rm /tmp/tmp.S8LpJusLvi /tmp/tmp.SPAgu2I5on +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rv2vDmEwGR +++ mktemp ++ local LAST_ERR=/tmp/tmp.MFDl6kIkb8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rv2vDmEwGR ++ cat /tmp/tmp.MFDl6kIkb8 ++ rm /tmp/tmp.rv2vDmEwGR /tmp/tmp.MFDl6kIkb8 ++ return 0 + [[ 3 == \3 ]] + echo + sleep 60 + compare_mysql_servers some-name-proxysql-0 mysql-servers-0-scaledown + local pod=some-name-proxysql-0 + local compare_file=mysql-servers-0-scaledown + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-0' check runtime_mysql_servers in some-name-proxysql-0+ compare_mysql_cmd_local mysql-servers-0-scaledown 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + echo + log 
'mysql_servers are configured in some-name-proxysql-0: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-04-08T13:26:43+0000]' mysql_servers are configured in some-name-proxysql-0: OK [2026-04-08T13:26:43+0000] mysql_servers are configured in some-name-proxysql-0: OK + compare_mysql_servers some-name-proxysql-1 mysql-servers-1-scaledown + local pod=some-name-proxysql-1 + local compare_file=mysql-servers-1-scaledown + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-1' check runtime_mysql_servers in some-name-proxysql-1+ compare_mysql_cmd_local mysql-servers-1-scaledown 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + echo + log 'mysql_servers are configured in some-name-proxysql-1: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-04-08T13:26:45+0000]' mysql_servers are configured in some-name-proxysql-1: OK [2026-04-08T13:26:45+0000] mysql_servers are configured in some-name-proxysql-1: OK + compare_mysql_servers some-name-proxysql-2 mysql-servers-2-scaledown + local pod=some-name-proxysql-2 + local compare_file=mysql-servers-2-scaledown + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-2' check runtime_mysql_servers in some-name-proxysql-2+ compare_mysql_cmd_local mysql-servers-2-scaledown 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-2 + echo + log 'mysql_servers are configured in some-name-proxysql-2: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-04-08T13:26:47+0000]' mysql_servers are configured in some-name-proxysql-2: OK [2026-04-08T13:26:47+0000] mysql_servers are configured in some-name-proxysql-2: OK + desc 'check PXC pod 0 is reader and writer when pods 1 and 2 are down' + set +o xtrace ----------------------------------------------------------------------------------- check PXC pod 0 is reader and writer when pods 1 and 2 are down ----------------------------------------------------------------------------------- + log 'scaling PXC down to 1 replica' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-04-08T13:26:47+0000]' scaling PXC down to 1 replica [2026-04-08T13:26:47+0000] scaling PXC down to 1 replica + kubectl scale sts/some-name-pxc --replicas=1 statefulset.apps/some-name-pxc scaled + sleep 20 + compare_mysql_servers some-name-proxysql-0 mysql-servers-0-two-pod-down + local pod=some-name-proxysql-0 + local compare_file=mysql-servers-0-two-pod-down + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-0' check runtime_mysql_servers in some-name-proxysql-0+ compare_mysql_cmd_local mysql-servers-0-two-pod-down 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + sleep 10 + echo -n . 
.+ let retry+=1 + [[ 1 -ge 30 ]] + compare_mysql_cmd_local mysql-servers-0-two-pod-down 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + sleep 10 + echo -n . .+ let retry+=1 + [[ 2 -ge 30 ]] + compare_mysql_cmd_local mysql-servers-0-two-pod-down 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + sleep 10 + echo -n . .+ let retry+=1 + [[ 3 -ge 30 ]] + compare_mysql_cmd_local mysql-servers-0-two-pod-down 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + sleep 10 + echo -n . .+ let retry+=1 + [[ 4 -ge 30 ]] + compare_mysql_cmd_local mysql-servers-0-two-pod-down 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + echo + log 'mysql_servers are configured in some-name-proxysql-0 when 2 pods are down: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-04-08T13:27:56+0000]' mysql_servers are configured in some-name-proxysql-0 when 2 pods are down: OK [2026-04-08T13:27:56+0000] mysql_servers are configured in some-name-proxysql-0 when 2 pods are down: OK + compare_mysql_servers some-name-proxysql-1 mysql-servers-1-two-pod-down + local pod=some-name-proxysql-1 + local compare_file=mysql-servers-1-two-pod-down + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-1' check runtime_mysql_servers in some-name-proxysql-1+ compare_mysql_cmd_local mysql-servers-1-two-pod-down 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + sleep 10 + echo -n . 
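With the PXC StatefulSet scaled to a single replica (kubectl scale sts/some-name-pxc --replicas=1 earlier in the trace), runtime_mysql_servers takes a while to reflect the missing members, so the comparison is retried with a 10-second pause, up to 30 attempts. A self-contained sketch of that retry, with the exec form and file paths simplified:

    query="SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='ONLINE'"
    expected=e2e-tests/proxysql-scheduler/compare/mysql-servers-0-two-pod-down.sql
    retry=0
    until kubectl exec some-name-proxysql-0 -c proxysql -- \
            mysql -h127.0.0.1 -P6032 -uproxyadmin -padmin_password -NBe "$query" \
            | diff -u "$expected" - >/dev/null; do
        [[ $retry -ge 30 ]] && { echo "runtime_mysql_servers never converged" >&2; exit 1; }
        sleep 10
        echo -n .
        retry=$((retry + 1))
    done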
.+ let retry+=1 + [[ 1 -ge 30 ]] + compare_mysql_cmd_local mysql-servers-1-two-pod-down 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + echo + log 'mysql_servers are configured in some-name-proxysql-1 when 2 pods are down: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-04-08T13:28:09+0000]' mysql_servers are configured in some-name-proxysql-1 when 2 pods are down: OK [2026-04-08T13:28:09+0000] mysql_servers are configured in some-name-proxysql-1 when 2 pods are down: OK + compare_mysql_servers some-name-proxysql-2 mysql-servers-2-two-pod-down + local pod=some-name-proxysql-2 + local compare_file=mysql-servers-2-two-pod-down + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-2' check runtime_mysql_servers in some-name-proxysql-2+ compare_mysql_cmd_local mysql-servers-2-two-pod-down 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-2 + echo + log 'mysql_servers are configured in some-name-proxysql-2 when 2 pods are down: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2026-04-08T13:28:11+0000]' mysql_servers are configured in some-name-proxysql-2 when 2 pods are down: OK [2026-04-08T13:28:11+0000] mysql_servers are configured in some-name-proxysql-2 when 2 pods are down: OK + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + destroy proxysql-scheduler-16506 + local namespace=proxysql-scheduler-16506 + local ignore_logs=true + [[ 0 == 1 ]] + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ + tee /tmp/tmp.IHyuq6Rrjq/operator.log + grep -v 'get backup status: Job.batch' + grep -v 'the object has been modified' + grep -v level=info + sort -u + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.jcKZkKx2Kb ++ jq -r '.items[] | select(.metadata.deletionTimestamp == null) | .metadata.name' +++ mktemp ++ local LAST_ERR=/tmp/tmp.JAgRp2jHmg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator ++ head -1 ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jcKZkKx2Kb ++ cat /tmp/tmp.JAgRp2jHmg ++ rm /tmp/tmp.jcKZkKx2Kb /tmp/tmp.JAgRp2jHmg ++ return 0 + kubectl_bin logs -n pxc-operator 
percona-xtradb-cluster-operator-5b5dc68764-z6tb7 ++ mktemp + local LAST_OUT=/tmp/tmp.GwwxDq0Gbu ++ mktemp + local LAST_ERR=/tmp/tmp.2hpBl5phnl + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-5b5dc68764-z6tb7 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.GwwxDq0Gbu + cat /tmp/tmp.2hpBl5phnl + rm /tmp/tmp.GwwxDq0Gbu /tmp/tmp.2hpBl5phnl + return 0 2026-04-08T13:01:24.779Z INFO setup Feature gates {"PXCO_FEATURE_GATES": "", "enabled": ""} 2026-04-08T13:01:24.779Z INFO setup Manager starting up {"gitCommit": "d4f7121dd0aa338449cb21800d39d94f885e61d4", "gitBranch": "PR-2427-d4f7121d", "buildTime": "2026-04-08T10:03:30Z", "goVersion": "go1.25.9", "os": "linux", "arch": "amd64"} 2026-04-08T13:01:24.779Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.32.13-gke.1205000"} 2026-04-08T13:01:24.783Z INFO setup Registering Components. 2026-04-08T13:01:25.099Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"} 2026-04-08T13:01:25.099Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2026-04-08T13:01:25.099Z INFO controller-runtime.metrics Starting metrics server 2026-04-08T13:01:25.099Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2026-04-08T13:01:25.099Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2026-04-08T13:01:25.099Z INFO controller-runtime.webhook Starting webhook server 2026-04-08T13:01:25.099Z INFO setup Starting the Cmd. 2026-04-08T13:01:25.099Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2026-04-08T13:01:25.100Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"} 2026-04-08T13:01:25.200Z INFO Attempting to acquire leader lease... 
{"lock": "pxc-operator/08db1feb.percona.com"} 2026-04-08T13:01:25.235Z DEBUG events percona-xtradb-cluster-operator-5b5dc68764-z6tb7_7879e0c4-ab6c-4582-9260-0cd9993598a0 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"03c6ad52-3238-41df-81d5-52d9f7cabd2d","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1775653285228143009"}, "reason": "LeaderElection"} 2026-04-08T13:01:25.235Z INFO Starting EventSource {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "source": "kind source: *v1.Secret"} 2026-04-08T13:01:25.235Z INFO Successfully acquired lease {"lock": "pxc-operator/08db1feb.percona.com"} 2026-04-08T13:01:25.236Z INFO Starting EventSource {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2026-04-08T13:01:25.236Z INFO Starting EventSource {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "source": "kind source: *v1.PerconaXtraDBCluster"} 2026-04-08T13:01:25.236Z INFO Starting EventSource {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2026-04-08T13:01:25.336Z INFO Starting Controller {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup"} 2026-04-08T13:01:25.336Z INFO Starting Controller {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster"} 2026-04-08T13:01:25.336Z INFO Starting Controller {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore"} 2026-04-08T13:01:25.336Z INFO Starting workers {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup", "worker count": 1} 2026-04-08T13:01:25.336Z INFO Starting workers {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "worker count": 1} 2026-04-08T13:01:25.336Z INFO Starting workers {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore", "worker count": 1} 2026-04-08T13:02:19.153Z INFO Set CR version {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "5213073c-5f00-4f3b-b9dd-ab183a0741d1", "version": "1.20.0"} 2026-04-08T13:02:19.480Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "5213073c-5f00-4f3b-b9dd-ab183a0741d1", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2026-04-08T13:02:19.503Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "5213073c-5f00-4f3b-b9dd-ab183a0741d1", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2026-04-08T13:02:19.629Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "5213073c-5f00-4f3b-b9dd-ab183a0741d1", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2026-04-08T13:02:19.668Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "5213073c-5f00-4f3b-b9dd-ab183a0741d1", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2026-04-08T13:02:19.714Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "5213073c-5f00-4f3b-b9dd-ab183a0741d1", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-04-08T13:02:19.762Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "5213073c-5f00-4f3b-b9dd-ab183a0741d1", "object": "some-name-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-04-08T13:02:19.808Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "5213073c-5f00-4f3b-b9dd-ab183a0741d1", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-04-08T13:02:19.886Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "5213073c-5f00-4f3b-b9dd-ab183a0741d1", "object": "some-name-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-04-08T13:02:20.717Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "6c7df240-802a-4374-82c5-38091f8b9c36", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2026-04-08T13:02:20.736Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": 
"PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "6c7df240-802a-4374-82c5-38091f8b9c36", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2026-04-08T13:03:42.100Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "c420ac77-300c-4664-b8ab-087035853c5f", "user": "operator"} 2026-04-08T13:03:42.153Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "c420ac77-300c-4664-b8ab-087035853c5f", "user": "monitor"} 2026-04-08T13:03:42.260Z INFO User monitor: granted privileges {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "c420ac77-300c-4664-b8ab-087035853c5f"} 2026-04-08T13:03:42.352Z INFO monitor user privileges granted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "c420ac77-300c-4664-b8ab-087035853c5f"} 2026-04-08T13:03:42.433Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "c420ac77-300c-4664-b8ab-087035853c5f", "user": "xtrabackup"} 2026-04-08T13:03:42.557Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "c420ac77-300c-4664-b8ab-087035853c5f"} 2026-04-08T13:03:42.624Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "c420ac77-300c-4664-b8ab-087035853c5f", "user": "replication"} 2026-04-08T13:03:42.632Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "c420ac77-300c-4664-b8ab-087035853c5f", "err": "get primary pxc pod: not found"} 2026-04-08T13:03:47.749Z INFO reconcile replication error {"controller": 
"pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "5a091b68-2d0f-4480-aa8e-52d4d0636401", "err": "get primary pxc pod: not found"} 2026-04-08T13:03:52.867Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "60e3b23b-78f6-428d-89a0-8f665750013e", "err": "get primary pxc pod: not found"} 2026-04-08T13:06:24.005Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "b1c00f08-4e64-4ab0-b8af-af7c17362f4e", "user": "root"} 2026-04-08T13:06:24.121Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "b1c00f08-4e64-4ab0-b8af-af7c17362f4e", "new version": "8.0.43-34.1"} 2026-04-08T13:06:26.307Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "b1c00f08-4e64-4ab0-b8af-af7c17362f4e"} 2026-04-08T13:06:32.508Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "80ce799a-b557-4932-baea-1d955adb6024"} 2026-04-08T13:06:38.228Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "1684d8f7-1d74-4f35-b3e8-6294d3b54168"} 2026-04-08T13:06:43.595Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "ba7bef82-dbcc-4259-b227-fe096ccedb17"} 2026-04-08T13:06:48.595Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "16a90c54-190e-4bcf-81e4-10f3d10ab88e"} 2026-04-08T13:06:53.973Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", 
"controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "fa1e7c21-1c07-4831-96e3-24008178aedf"} 2026-04-08T13:06:59.709Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "66a133a0-d2c0-4b1b-a238-b5431a0d82bb"} 2026-04-08T13:07:04.587Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "93bb52ca-4742-46de-ad54-29e60e07f9ac"} 2026-04-08T13:07:09.911Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "f206d869-90eb-4783-aef1-f948b75629b1"} 2026-04-08T13:07:15.312Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "c585c4c9-773b-4a30-9f64-1ba1a62b85b6"} 2026-04-08T13:07:20.428Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "8ac5fadf-1aca-42cc-9677-79c195358642"} 2026-04-08T13:07:26.107Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "76f69946-985f-48c5-b2f6-862a0a6082bb"} 2026-04-08T13:07:31.478Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "bc58fbf6-c375-46b7-8721-3aaff4a6e133"} 2026-04-08T13:07:36.784Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "6f2120f0-ac1a-4e90-9aa0-af4546689f51"} 2026-04-08T13:07:42.233Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "7d799a1d-4857-449a-ad89-33cd248334f2"} 2026-04-08T13:07:47.318Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "5f199275-d00e-44cc-a04d-0ef3ef4a4b71"} 2026-04-08T13:07:52.910Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "71663391-96bc-4116-a3b2-c03788b5373f"} 2026-04-08T13:07:58.421Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "b4244b4a-4bd4-4fd1-a2c9-888cbda86363"} 2026-04-08T13:08:03.673Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "a6f39c05-caac-4ac6-a856-7ee5a1e83754"} 2026-04-08T13:08:08.528Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "c3be9e20-9aa0-43d8-abf8-0c4676d521c5"} 2026-04-08T13:08:14.183Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "de469306-ef8b-4518-a257-2289ee1a8665"} 2026-04-08T13:08:19.309Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "813f3634-1fe9-4fa3-afc1-ae657e24bbde"} 2026-04-08T13:08:24.774Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "960bbdda-a33e-4db2-bbb4-c7dff8b53f98"} 2026-04-08T13:08:29.973Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": 
"0de9ab2c-ea72-457b-a3b0-65477e68a93b"} 2026-04-08T13:08:35.409Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "8c420927-5316-4ef1-8dfa-868dc0a2012e"} 2026-04-08T13:08:41.004Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "82fcaf27-e9e1-4575-8cba-ab8790a66dae"} 2026-04-08T13:08:46.310Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "e5473b26-6ef6-4c4f-a871-340f481d9147"} 2026-04-08T13:08:51.203Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "8c456efa-d105-4d0f-b932-3edd8ee76a9c"} 2026-04-08T13:08:56.922Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "2cdce85c-4a19-4254-94bc-c540877c4cca"} 2026-04-08T13:09:02.106Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "da5bc319-bee0-414f-adca-cab67805b9d1"} 2026-04-08T13:09:07.588Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "e851c2a0-d7c6-4933-84cb-ec6c665f82fa"} 2026-04-08T13:09:12.526Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "3721b481-2849-4aea-bcbf-4d15b533b5a1"} 2026-04-08T13:09:18.031Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "a3804d62-6e3c-4d3a-b7fb-0fd069cdf88b"} 2026-04-08T13:09:23.331Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": 
"pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "808627d9-4977-4df3-bc4a-1707efe9892a"} 2026-04-08T13:09:28.683Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "a7147cb1-291a-41ff-8eb1-6a872a8ac830"} 2026-04-08T13:09:33.631Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "45a7ff81-45c7-4057-89e5-ff6a7daa0344"} 2026-04-08T13:09:39.008Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "ab7735e5-169f-49f5-9ee9-0b6ef23a0ec3"} 2026-04-08T13:09:44.826Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "1c227e16-fd27-42a7-a2cb-176aca5478dd"} 2026-04-08T13:09:50.013Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "b8b46d47-cc9c-4048-8a88-5574c96bfd2c"} 2026-04-08T13:09:55.427Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "35892700-7495-49b9-a0ce-018d48e510c0"} 2026-04-08T13:10:00.576Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "1fc60cc9-7f96-4f04-9ae0-d42bcb73a7f7"} 2026-04-08T13:10:05.915Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "83e42b2d-9a5c-490e-a05b-8ce91c772ccb"} 2026-04-08T13:10:11.129Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": 
"proxysql-scheduler-16506", "name": "some-name", "reconcileID": "9a392de1-bb62-4706-93a5-98feb266d475"} 2026-04-08T13:10:15.796Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "9ef7f3f9-dab3-4da0-babe-32e3e7595f80", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2026-04-08T13:10:15.851Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "9ef7f3f9-dab3-4da0-babe-32e3e7595f80", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2026-04-08T13:10:16.656Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "08161b2f-017f-4079-b817-89fe58f37454", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:832\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2026-04-08T13:11:03.526Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "f1ae9c3e-d69d-48cf-a52a-91f945e946ce"} 2026-04-08T13:11:07.836Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "8bde7f3c-d275-4b86-9b0f-4d7675f74be8"} 2026-04-08T13:11:13.225Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "ee606f99-7a3b-444e-985e-e489a3eaf17b"} 2026-04-08T13:11:18.202Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", 
"PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "effdb960-f297-4b40-a842-3b4ce545be92"} 2026-04-08T13:11:23.552Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "1c305d60-6f4c-4c36-8fe3-50eeb76d30aa"} 2026-04-08T13:11:29.204Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "13c1f5e9-281c-4294-8e42-3be4314a3f50"} 2026-04-08T13:11:34.356Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "e2e52db2-4983-4af9-bab2-bff93759ec50"} 2026-04-08T13:11:39.651Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "5077637b-3a29-4d90-9cbc-46cd0e770ada"} 2026-04-08T13:11:44.648Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "8f9b32ef-0195-4bd5-bb61-86a09e103895"} 2026-04-08T13:11:50.204Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "2f45882e-9d96-4675-b142-9f0bc66989c0"} 2026-04-08T13:11:55.615Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "3e6388de-69e5-4969-b01e-7afb2a9d2e91"} 2026-04-08T13:12:00.535Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "914f5b66-8ac5-4735-96c3-af4268596e0c"} 2026-04-08T13:12:06.214Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": 
"fac913c5-76b6-4297-8252-342a47e6e1e9"} 2026-04-08T13:12:11.534Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "74612afd-233b-4937-af3d-03fd80f2ebab"} 2026-04-08T13:12:16.621Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "788b0d4f-d544-4c7a-8c5d-be2bac6f8113"} 2026-04-08T13:12:22.156Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "6d7e8595-aa01-40ba-8ce7-2a1402563663"} 2026-04-08T13:12:27.541Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "f7e2fadc-e4b3-40ee-947d-fc8f31636b36"} 2026-04-08T13:12:32.700Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "f687b066-7629-4d10-895e-c6abe956bf1c"} 2026-04-08T13:12:38.146Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "af97d96f-a2cd-47da-8199-826d218def12"} 2026-04-08T13:12:43.329Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "c1013d77-8a55-4de3-957d-72cfdf30829e"} 2026-04-08T13:12:48.316Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "44e7a396-94c3-466d-b895-fca78a6b74ee", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2026-04-08T13:12:48.611Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "4b85543a-a0af-4fd7-839d-32c2b2639732"} 
2026-04-08T13:13:30.004Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "97373f02-f08b-4287-b74d-74c20baefa8b"} 2026-04-08T13:13:32.941Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "744eb812-aa22-45a0-b51b-7f0ef52d5808", "err": "get primary pxc pod: not found"} 2026-04-08T13:13:36.083Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "744eb812-aa22-45a0-b51b-7f0ef52d5808", "error": "syncusers: ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "syncusers: ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:979\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:832\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2026-04-08T13:13:40.597Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "a1798558-4647-4093-8eae-70ec9c3b5074"} 2026-04-08T13:13:46.052Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "022801fe-7634-4514-b742-3ab3c55bdfc3"} 2026-04-08T13:13:51.409Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "4ffaf1a3-7576-417f-b10f-70c1de965394"} 2026-04-08T13:13:56.444Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "32529c4c-59fd-4815-bc89-ef46e3aad43b"} 2026-04-08T13:14:01.829Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", 
"controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "44fbc2f0-283f-48f9-b09f-7b0c9f3d7ad5"} 2026-04-08T13:14:07.148Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "e03b98fb-2505-4e3f-827a-8b8272c043c1"} 2026-04-08T13:14:12.432Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "c808a1f6-d515-4aaf-a29e-450acdfd341c"} 2026-04-08T13:14:17.533Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "d8396cc6-c040-49df-9dbf-8428b26d005f"} 2026-04-08T13:14:22.802Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "2c210b32-1a28-4aa1-a21e-e6ff7722b3f1"} 2026-04-08T13:14:28.613Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "58de5898-28c0-49fe-9dc1-2600a85d0944"} 2026-04-08T13:14:33.644Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "040f63af-de86-45f5-b0e2-d60d003fa4c2"} 2026-04-08T13:14:38.635Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "b65a4e95-84c2-4808-b682-f4603d23acfc"} 2026-04-08T13:14:47.891Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "e65a2fba-46cc-4450-8f90-a6fe1d230f14", "primary name": "some-name-pxc-0.some-name-pxc.proxysql-scheduler-16506.svc.cluster.local"} 2026-04-08T13:14:48.018Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "39b211c4-0b27-4f9f-84e2-45b88d0f52bc", "primary name": "some-name-pxc-0.some-name-pxc.proxysql-scheduler-16506.svc.cluster.local"} 2026-04-08T13:14:53.138Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "82f285ee-9ec3-466c-a80d-f45d9fd06d8f", "primary name": "some-name-pxc-0.some-name-pxc.proxysql-scheduler-16506.svc.cluster.local"} 2026-04-08T13:14:58.263Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "80031d53-0f98-456d-ba04-f6ecf5c3ac7b", "primary name": "some-name-pxc-0.some-name-pxc.proxysql-scheduler-16506.svc.cluster.local"} 2026-04-08T13:15:03.376Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "ac3577fd-7915-47d5-b92f-d0a8c8328b4a", "primary name": "some-name-pxc-0.some-name-pxc.proxysql-scheduler-16506.svc.cluster.local"} 2026-04-08T13:15:08.491Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "975e087b-2b53-4196-99d7-45bb1ac22471", "primary name": "some-name-pxc-0.some-name-pxc.proxysql-scheduler-16506.svc.cluster.local"} 2026-04-08T13:15:13.665Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "83cb5873-2b99-4b06-80b5-8698ecd98c5c", "primary name": "some-name-pxc-0.some-name-pxc.proxysql-scheduler-16506.svc.cluster.local"} 2026-04-08T13:15:18.839Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "9c2e3cb8-5f2e-4d6a-a177-7facaea231a6", "primary name": "some-name-pxc-0.some-name-pxc.proxysql-scheduler-16506.svc.cluster.local"} 2026-04-08T13:15:23.973Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "3d452034-9745-4277-91d2-158812aef679", "primary name": "some-name-pxc-0.some-name-pxc.proxysql-scheduler-16506.svc.cluster.local"} 2026-04-08T13:15:32.441Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "63fc24ae-3bfb-4d71-b80d-8d025ba0d415"} 2026-04-08T13:15:37.465Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "c19e7571-d74f-4ba4-8755-5ab55d283a3b", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2026-04-08T13:15:37.584Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "c19e7571-d74f-4ba4-8755-5ab55d283a3b", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2026-04-08T13:15:37.822Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "bdf547f6-3f9d-43e9-884b-4e4b2a05bd8c"} 2026-04-08T13:18:28.656Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "1a9e01f6-aaa4-4828-a951-442440451782"} 2026-04-08T13:18:34.797Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "01b0c12f-0cac-4ce7-95fd-f94151f99a1d"} 2026-04-08T13:18:40.325Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", 
"controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "9f38a1ea-3b4e-4d3f-ae39-af9ccfb7a407"} 2026-04-08T13:18:45.608Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "dca6b283-9c2c-4b54-bb23-f5f6c8ba96aa"} 2026-04-08T13:18:51.217Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "f53cea8e-f78b-46a0-b793-2ff7a557abac"} 2026-04-08T13:18:56.536Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "8ebb3002-aeb4-45d1-a532-81ce487c7daa"} 2026-04-08T13:19:01.931Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "68fc67a5-f2a5-4441-91f5-17f6568ab650"} 2026-04-08T13:19:07.236Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "af6ce3ef-de52-4c87-af6c-ff2bbc2afe1b"} 2026-04-08T13:19:12.613Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "353a14d6-749b-47d7-8743-718856f181bf"} 2026-04-08T13:19:17.832Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "5a82ed0d-4eee-47cf-b843-5ad50c9ac790"} 2026-04-08T13:19:23.628Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "02abddad-0f2e-4e9d-a602-af0efa6255f6"} 2026-04-08T13:19:29.026Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "70dfd821-17ae-4524-84ad-6a902003debf"} 2026-04-08T13:19:34.553Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "45cb9f3d-44e6-432e-bff4-081aa13804d3"} 2026-04-08T13:19:40.138Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "92e01625-fb28-4c95-996a-288a717dca59"} 2026-04-08T13:19:45.412Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "f47005c4-bc86-4f22-b123-ae1842e262d7"} 2026-04-08T13:19:51.247Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "d14a0b48-84a7-41a5-b153-e0acbf39168e"} 2026-04-08T13:19:56.424Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "0a33ff39-1811-4255-a547-3a1984c05252"} 2026-04-08T13:20:02.148Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "e2e83b81-a373-40db-9d61-4b5a6916c4ce"} 2026-04-08T13:20:07.413Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "0fbd5f27-c2cb-4884-8342-85eeff2cebf6"} 2026-04-08T13:20:12.639Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "2a640454-6a84-4c1e-af9b-6d8b28855253"} 2026-04-08T13:20:18.116Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": 
"41f77f5d-9f11-4ee1-a915-37976158e875"} 2026-04-08T13:20:23.727Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "cdc948db-fefb-4499-a5eb-647e3f7ae654"} 2026-04-08T13:20:29.216Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "961c1285-66d4-4222-90ad-d9460239c7d1"} 2026-04-08T13:20:29.489Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "fc7cc5b1-ff3c-4bfa-b72d-678fed2477a1", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2026-04-08T13:20:29.549Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "fc7cc5b1-ff3c-4bfa-b72d-678fed2477a1", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2026-04-08T13:20:33.294Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "fc7cc5b1-ff3c-4bfa-b72d-678fed2477a1", "error": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-2 does not have a host assigned / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-2 does not have a host assigned / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:832\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2026-04-08T13:21:01.363Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "48d36170-6641-4aef-a755-28e826b8b519", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in 
ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:832\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2026-04-08T13:21:06.068Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "98757182-7c1a-4854-afe9-8a6c2b3233c3", "err": "get primary pxc pod: not found"} 2026-04-08T13:21:10.712Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "98757182-7c1a-4854-afe9-8a6c2b3233c3"} 2026-04-08T13:21:15.157Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "ef8ca112-53c7-40fe-a869-51aafeff410f"} 2026-04-08T13:21:20.927Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "bc1c87ff-6b1a-4654-b75f-9230a0ae50e5"} 2026-04-08T13:21:26.149Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "9fa685b4-8eea-4964-aebf-74eea6eb82c8"} 2026-04-08T13:21:31.563Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "ce2d5cb9-a059-456d-92de-8b0268e8ad3a"} 2026-04-08T13:21:36.942Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "33473a68-3a71-4a97-9c2a-c281f9bab484"} 2026-04-08T13:21:42.551Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "bc76d0eb-0982-4522-b839-eae0e3565ba7"} 2026-04-08T13:21:47.741Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "d9fb35d9-9c8c-4c4b-af24-31c89677b588"} 2026-04-08T13:21:53.147Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "bd66f2d0-a26b-4eea-abfc-584ad745ce5e"} 2026-04-08T13:21:58.564Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "d371c7a9-2bb7-4fdb-9307-b1c81f35aa42"} 2026-04-08T13:22:04.150Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "2a4a241a-3f28-44f8-8d9b-88a62adea092"} 2026-04-08T13:22:09.563Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "78166c51-718e-4470-bc9f-cd763be14d14"} 2026-04-08T13:22:14.779Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "8da357b3-9ed7-4d36-988e-651b6997a19f"} 2026-04-08T13:22:20.151Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": 
"6381f1ee-ef49-40dd-bb04-3cc212e26441"} 2026-04-08T13:22:25.861Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "a448ccab-e5af-4828-9308-31fa6e50c0cb"} 2026-04-08T13:22:31.161Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "cf77adac-428b-4764-b967-9682ddf55cff"} 2026-04-08T13:22:36.534Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "ebceef83-02a4-47b6-9e56-976d2cb96c31"} 2026-04-08T13:22:42.134Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "19292a16-2bbe-4f66-a539-4607031ee55b"} 2026-04-08T13:22:47.373Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "0d873a8f-66f2-4cbc-b61c-a350cfa34bff"} 2026-04-08T13:22:53.127Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "1e216c49-650e-481d-87aa-9128bab5471a"} 2026-04-08T13:22:58.638Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "72286292-7467-4e8a-b605-368da3422207"} 2026-04-08T13:22:59.699Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "5361597f-a062-43f9-ab80-fbf621e98833", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2026-04-08T13:22:59.761Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "5361597f-a062-43f9-ab80-fbf621e98833", "object": 
"some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2026-04-08T13:23:03.223Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "5361597f-a062-43f9-ab80-fbf621e98833", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:832\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2026-04-08T13:23:50.257Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "99c1eab9-c094-45af-b54a-302be29c85b9"} 2026-04-08T13:23:56.697Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "5b4426ea-a204-45e6-83d1-d69ca1b52854"} 2026-04-08T13:24:07.242Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "5e36620d-8d6b-4559-99b2-5372e090945b"} 2026-04-08T13:24:12.762Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "822887cf-10d6-41de-a6c2-62cf7dc05918"} 2026-04-08T13:24:18.067Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "861badc3-9e80-41d8-ae48-4a34f649801d"} 2026-04-08T13:24:23.437Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "efa861f6-a3f3-4e44-b054-f880e621c7df"} 
2026-04-08T13:24:28.648Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "950e6da1-f35c-48b5-8869-a4c7a3a04f3d"} 2026-04-08T13:24:33.984Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "6c92a742-07bb-4eef-8ba5-cb92bc253150"} 2026-04-08T13:24:39.067Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "8f80768a-6bd7-46c8-82ea-b06062c7c65a"} 2026-04-08T13:24:45.184Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "a9cabe9e-35f2-49fe-9e76-583e0cf0caa4"} 2026-04-08T13:24:50.859Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "d5b2dcce-1c7c-4159-801e-dec99b2b814c"} 2026-04-08T13:24:56.249Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "85f4e16c-54f4-47dc-94b6-d7f0e83bed85"} 2026-04-08T13:24:59.451Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "490364b5-f5e3-47ac-825c-5ea77e81792a", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2026-04-08T13:24:59.533Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "490364b5-f5e3-47ac-825c-5ea77e81792a", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2026-04-08T13:25:01.672Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": 
"some-name", "reconcileID": "f7068cff-ce2f-4254-a001-0b6edb17740a"} 2026-04-08T13:25:12.052Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "ef859a04-abbd-4179-b72d-19355ac198d0", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-4: dial tcp 10.148.72.77:33062: connect: connection refused"} 2026-04-08T13:25:23.026Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "0c4eae9f-4c42-4b0b-b295-a7daa9633dea", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-3: dial tcp: lookup some-name-pxc-3.some-name-pxc.proxysql-scheduler-16506 on 34.118.224.10:53: no such host"} 2026-04-08T13:25:28.321Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "ddbae6f0-4730-4120-b6f4-33b0c4c3b9b2", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-3: dial tcp 10.148.73.45:33062: connect: connection refused"} 2026-04-08T13:25:38.181Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "2e5ffcae-50e2-4db0-bb4d-fac08a2d0337"} 2026-04-08T13:25:43.552Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "2fcf3239-0154-4aa1-b280-c14f1e5a0682"} 2026-04-08T13:25:48.661Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "6c254ac9-f081-419b-bd85-1c356cddbf47"} 2026-04-08T13:25:53.745Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "7d310765-87d0-427f-bd38-e8d699d69b2e"} 2026-04-08T13:25:59.048Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "d892752b-1b90-4201-b1a4-af2659ccacdd"} 
2026-04-08T13:26:04.470Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "34e72403-d3a1-4151-89fe-93cad0e73341"} 2026-04-08T13:26:10.446Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "a022258f-c9fd-4fb7-946a-1b779d3dd949"} 2026-04-08T13:26:15.053Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "0a676b75-1a02-45d1-8e7f-75953b0c88bb"} 2026-04-08T13:26:20.594Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "cb4763cb-c67a-4b9b-9186-d39aa1c32634"} 2026-04-08T13:26:25.982Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "9ab3d27a-cd7d-499c-8532-012c7d3d9ea4"} 2026-04-08T13:26:31.175Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "99211edd-6cf2-4d29-8370-3295dc5d0366"} 2026-04-08T13:26:36.484Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "6bad2fa4-baaf-4651-aa78-49c47c89ca5c"} 2026-04-08T13:26:42.083Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "4a7a20fb-6eaa-4135-bc7f-be8faf2d2669"} 2026-04-08T13:26:46.996Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "2521ff14-0ab1-4ec9-a972-78bea3b36348"} 2026-04-08T13:26:52.252Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": 
"PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "c55803b2-5eb7-4f76-b9ad-9b643b2015e4"} 2026-04-08T13:27:00.035Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "35769949-55bb-4a35-ba9a-8f66493ba80e", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-2: dial tcp 10.148.74.47:33062: connect: connection refused"} 2026-04-08T13:27:17.248Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-16506"}, "namespace": "proxysql-scheduler-16506", "name": "some-name", "reconcileID": "12383f15-b013-49f2-8bd4-146b51d35073", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp 10.148.73.44:33062: connect: connection refused"} github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:834 [mysql] 2026/04/08 13:10:15 packets.go:58 unexpected EOF -  }, -  { -  }, -  { -  }, -  }, +  }, +  { +  }, +  }, -  Annotations: map[string]string{ +  Annotations: map[string]string{ -  APIVersion: "apps/v1", -  APIVersion: "apps/v1", -  Args: []string{ +  Args: []string{ +  AvailableReplicas: 0, -  AvailableReplicas: 2, -  AvailableReplicas: 3, -  AvailableReplicas: 5, -  CollisionCount: &0, +  CollisionCount: nil, -  Command: []string{"/opt/percona/proxysql-entrypoint.sh"}, +  Command: []string{"/opt/percona/proxysql-entrypoint.sh"}, +  CreationTimestamp: v1.Time{}, -  CreationTimestamp: v1.Time{Time: s"2026-04-08 13:02:19 +0000 UTC"}, +  CurrentReplicas: 0, -  CurrentReplicas: 2, -  CurrentReplicas: 3, -  CurrentReplicas: 5, +  CurrentRevision: "", -  CurrentRevision: "some-name-proxysql-7bd48b5fdb", -  CurrentRevision: "some-name-proxysql-85fb99667b", -  CurrentRevision: "some-name-pxc-66bcbddfc9", -  DefaultMode: &420, -  DefaultMode: &420, +  DefaultMode: nil, +  DefaultMode: nil, +  DeprecatedServiceAccount: "", -  DeprecatedServiceAccount: "default", +  DNSPolicy: "", -  DNSPolicy: "ClusterFirst", -  EnvFrom: []v1.EnvFromSource{{SecretRef: s"&SecretEnvSource{LocalObjectRefe"...}}, +  EnvFrom: []v1.EnvFromSource{{SecretRef: s"&SecretEnvSource{LocalObjectRefe"...}}, -  Env: []v1.EnvVar{ +  Env: []v1.EnvVar{ -  FieldsType: "FieldsV1", -  FieldsType: "FieldsV1", -  FieldsV1: s`{"f:metadata":{"f:annotations":{".":{},"f:percona.com/last-confi`..., -  FieldsV1: s`{"f:status":{"f:availableReplicas":{},"f:collisionCount":{},"f:c`..., +  Generation: 0, -  Generation: 1, -  Generation: 2, -  Generation: 3, -  Generation: 4, -  Image: "perconalab/percona-xtradb-cluster-operator:main-proxysql", +  Image: "perconalab/percona-xtradb-cluster-operator:main-proxysql", -  ImagePullPolicy: "Always", +  ImagePullPolicy: "Always", +  ManagedFields: nil, -  ManagedFields: []v1.ManagedFieldsEntry{ -  Manager: "kube-controller-manager", -  Manager: "percona-xtradb-cluster-operator", -  {Name: "MONITOR_PASSWORD", ValueFrom: 
s"&EnvVarSource{FieldRef:nil,Resou"...}, +  {Name: "MONITOR_PASSWORD", ValueFrom: s"&EnvVarSource{FieldRef:nil,Resou"...}, -  {Name: "OPERATOR_PASSWORD", ValueFrom: s"&EnvVarSource{FieldRef:nil,Resou"...}, +  {Name: "OPERATOR_PASSWORD", ValueFrom: s"&EnvVarSource{FieldRef:nil,Resou"...}, -  {Name: "PROXY_ADMIN_PASSWORD", ValueFrom: s"&EnvVarSource{FieldRef:nil,Resou"...}, +  {Name: "PROXY_ADMIN_PASSWORD", ValueFrom: s"&EnvVarSource{FieldRef:nil,Resou"...}, -  {Name: "PROXY_ADMIN_USER", Value: "proxyadmin"}, +  {Name: "PROXY_ADMIN_USER", Value: "proxyadmin"}, -  Name: "proxysql-monit", +  Name: "proxysql-monit", -  {Name: "PROXYSQL_SERVICE", Value: "some-name-proxysql-unready"}, +  {Name: "PROXYSQL_SERVICE", Value: "some-name-proxysql-unready"}, -  {Name: "SCHEDULER_ENABLED", Value: "true"}, +  {Name: "SCHEDULER_ENABLED", Value: "true"}, +  ObservedGeneration: 0, -  ObservedGeneration: 1, -  ObservedGeneration: 2, -  ObservedGeneration: 3, -  ObservedGeneration: 4, -  "-on-change=/opt/percona/proxysql_add_proxysql_nodes.sh", +  "-on-change=/opt/percona/proxysql_add_proxysql_nodes.sh", -  Operation: "Update", -  Operation: "Update", -  "/opt/percona/peer-list", +  "/opt/percona/peer-list", -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJjMDBlODlkMzAxNzRkNmFhN2FkODM4OGQ3ZTIxYTI3YiIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNvbS9zc2wtaW50ZXJuYWwtaGFzaCI6Ijc1MWU1OWExYzFkOTRhYzY3ZjlkNmJiNmU3NTZhY2RjIn19LCJzcGVjIjp7InZvbHVtZXMiOlt7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImNvbmZpZyIsImNvbmZpZ01hcCI6eyJuYW1lIjoic29tZS1uYW1lLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJiaW4iLCJlbXB0eURpciI6e319XSwiaW5pdENvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTI0MjctZDRmNzEyMWQiLCJjb21tYW5kIjpbIi9weGMtaW5pdC1lbnRyeXBvaW50LnNoIl0sInJlc291cmNlcyI6eyJsaW1pdHMiOnsiY3B1IjoiNTBtIiwibWVtb3J5IjoiNTBNIn19LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJiaW4iLCJtb3VudFBhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHJveHlzcWwtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTI0MjctZDRmNzEyMWQiLCJjb2
1tYW5kIjpbIi9wcm94eXNxbC1pbml0LWVudHJ5cG9pbnQuc2giXSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6eyJjcHUiOiI1MG0iLCJtZW1vcnkiOiI1ME0ifX0sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImJpbiIsIm1vdW50UGF0aCI6Ii9vcHQvcGVyY29uYSJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn1dLCJjb250YWluZXJzIjpbeyJuYW1lIjoicHJveHlzcWwiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXByb3h5c3FsIiwiY29tbWFuZCI6WyIvb3B0L3BlcmNvbmEvcHJveHlzcWwtZW50cnlwb2ludC5zaCJdLCJhcmdzIjpbInByb3h5c3FsIiwiLWYiLCItYyIsIi9ldGMvcHJveHlzcWwvcHJveHlzcWwuY25mIiwiLS1yZWxvYWQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InByb3h5YWRtIiwiY29udGFpbmVyUG9ydCI6NjAzMn0seyJuYW1lIjoic3RhdHMiLCJjb250YWluZXJQb3J0Ijo2MDcwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IlBYQ19TRVJWSUNFIiwidmFsdWUiOiJzb21lLW5hbWUtcHhjIn0seyJuYW1lIjoiT1BFUkFUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiUFJPWFlfQURNSU5fVVNFUiIsInZhbHVlIjoicHJveHlhZG1pbiJ9LHsibmFtZSI6IlBST1hZX0FETUlOX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoicHJveHlhZG1pbiJ9fX0seyJuYW1lIjoiTU9OSVRPUl9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6Im1vbml0b3IifX19LHsibmFtZSI6IlNDSEVEVUxFUl9DSEVDS1RJTUVPVVQiLCJ2YWx1ZSI6IjIwMDAifSx7Im5hbWUiOiJTQ0hFRFVMRVJfV1JJVEVSQUxTT1JFQURFUiIsInZhbHVlIjoiMSJ9LHsibmFtZSI6IlNDSEVEVUxFUl9SRVRSWVVQIiwidmFsdWUiOiIxIn0seyJuYW1lIjoiU0NIRURVTEVSX1JFVFJZRE9XTiIsInZhbHVlIjoiMyJ9LHsibmFtZSI6IlNDSEVEVUxFUl9QSU5HVElNRU9VVCIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9OT0RFQ0hFQ0tJTlRFUlZBTCIsInZhbHVlIjoiMjAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9NQVhDT05ORUNUSU9OUyIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlBFUkNPTkFfU0NIRURVTEVSX0NGRyIsInZhbHVlIjoiL3RtcC9zY2hlZHVsZXItY29uZmlnLnRvbWwifSx7Im5hbWUiOiJTQ0hFRFVMRVJfRU5BQkxFRCIsInZhbHVlIjoidHJ1ZSJ9XSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6eyJjcHUiOiI3MDBtIiwibWVtb3J5IjoiMUcifSwicmVxdWVzdHMiOnsiY3B1IjoiMTAwbSIsIm1lbW9y"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJjMDBlODlkMzAxNzRkNmFhN2FkODM4OGQ3ZTIxYTI3YiIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNvbS9zc2wtaW50ZXJuYWwtaGFzaCI6Ijc1MWU1OWExYzFkOTRhYzY3ZjlkNmJiNmU3NTZhY2RjIn19LCJzcGVjIjp7InZvbHVtZXMiOlt7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImNvbmZpZyIsImNvbmZpZ01hcCI6eyJuYW1lIjoic29tZS1uYW1lLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJiaW4iLCJlbXB0eURpciI6e319XSwiaW5pdENvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTI0MjctZDRmNzEyMWQiLCJjb21tYW5kIjpbIi9weGMtaW5pdC1lbnRyeXBvaW50LnNoIl0sInJlc291cmNlcyI6eyJsaW1pdHMiOnsiY3B1IjoiNTBtIiwibWVtb3J5IjoiNTBNIn19LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJiaW4iLCJtb3VudFBhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHJveHlzcWwtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTI0MjctZDRmNzEyMWQiLCJjb21tYW5kIjpbIi9wcm94eXNxbC1pbml0LWVudHJ5cG9pbnQuc2giXSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6eyJjcHUiOiI1MG0iLCJtZW1vcnkiOiI1ME0ifX0sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImJpbiIsIm1vdW50UGF0aCI6Ii9vcHQvcGVyY29uYSJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn1dLCJjb250YWluZXJzIjpbeyJuYW1lIjoicHJveHlzcWwiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXByb3h5c3FsIiwiY29tbWFuZCI6WyIvb3B0L3BlcmNvbmEvcHJveHlzcWwtZW50cnlwb2ludC5zaCJdLCJhcmdzIjpbInByb3h5c3FsIiwiLWYiLCItYyIsIi9ldGMvcHJveHlzcWwvcHJveHlzcWwuY25mIiwiLS1yZWxvYWQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InByb3h5YWRtIiwiY29udGFpbmVyUG9ydCI6NjAzMn0seyJuYW1lIjoic3RhdHMiLCJjb250YWluZXJQb3J0Ijo2MDcwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IlBYQ19TRVJWSUNFIiwidmFsdWUiOiJzb21lLW5hbWUtcHhjIn0seyJuYW1lIjoiT1BFUkFUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiUFJPWFlfQURNSU5fVVNFUiIsInZhbHVlIjoicHJveHlhZG1pbiJ9LHsibmFtZSI6IlBST1hZX0FETUlOX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoicHJveHlhZG1pbiJ9fX0seyJuYW1lIjoiTU9OSVRPUl9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6Im1vbml0b3IifX19LHsibmFtZSI6IlNDSEVEVUxFUl9DSEVDS1RJTUVPVVQiLCJ2YWx1ZSI6IjIwMDAifSx7Im5hbWUiOiJTQ0hFRFVMRVJfV1JJVEVSQUxTT1JFQURFUiIsInZhbHVlIjoiMSJ9LHsibmFtZSI6IlNDSEVEVUxFUl9SRVRSWVVQIi
widmFsdWUiOiIxIn0seyJuYW1lIjoiU0NIRURVTEVSX1JFVFJZRE9XTiIsInZhbHVlIjoiMyJ9LHsibmFtZSI6IlNDSEVEVUxFUl9QSU5HVElNRU9VVCIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9OT0RFQ0hFQ0tJTlRFUlZBTCIsInZhbHVlIjoiMjAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9NQVhDT05ORUNUSU9OUyIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlBFUkNPTkFfU0NIRURVTEVSX0NGRyIsInZhbHVlIjoiL3RtcC9zY2hlZHVsZXItY29uZmlnLnRvbWwifSx7Im5hbWUiOiJTQ0hFRFVMRVJfRU5BQkxFRCIsInZhbHVlIjoidHJ1ZSJ9XSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6eyJjcHUiOiI3MDBtIiwibWVtb3J5IjoiMUcifSwicmVxdWVzdHMiOnsiY3B1IjoiMTAwbSIsIm1lbW9y"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJjMDBlODlkMzAxNzRkNmFhN2FkODM4OGQ3ZTIxYTI3YiIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNvbS9zc2wtaW50ZXJuYWwtaGFzaCI6Ijc1MWU1OWExYzFkOTRhYzY3ZjlkNmJiNmU3NTZhY2RjIn19LCJzcGVjIjp7InZvbHVtZXMiOlt7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImNvbmZpZyIsImNvbmZpZ01hcCI6eyJuYW1lIjoic29tZS1uYW1lLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJiaW4iLCJlbXB0eURpciI6e319XSwiaW5pdENvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTI0MjctZDRmNzEyMWQiLCJjb21tYW5kIjpbIi9weGMtaW5pdC1lbnRyeXBvaW50LnNoIl0sInJlc291cmNlcyI6eyJsaW1pdHMiOnsiY3B1IjoiNTBtIiwibWVtb3J5IjoiNTBNIn19LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJiaW4iLCJtb3VudFBhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHJveHlzcWwtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTI0MjctZDRmNzEyMWQiLCJjb21tYW5kIjpbIi9wcm94eXNxbC1pbml0LWVudHJ5cG9pbnQuc2giXSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6eyJjcHUiOiI1MG0iLCJtZW1vcnkiOiI1ME0ifX0sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImJpbiIsIm1vdW50UGF0aCI6Ii9vcHQvcGVyY29uYSJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn1dLCJjb250YWluZXJzIjpbeyJuYW1lIjoicHJveHlzcWwiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXByb3h5c3FsIiwiY29tbWFuZCI6WyIvb3B0L3BlcmNvbmEvcHJveHlzcWwtZW50cnlwb2ludC5zaCJdLCJhcmdzIjpbInByb3h5c3FsIiwiLWYiLCItYyIsIi9ldGMvcHJveHlzcWwvcHJveHlzcWwuY25mIiwiLS1yZWxvYWQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InByb3h5YWRtIiwiY29udGFpbmVyUG9ydCI6NjAzMn0seyJuYW1lIjoic3RhdHMiLCJjb250YWluZXJQb3J0Ijo2MDcwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IlBYQ19TRVJWSUNFIiwidmFsdWUiOiJzb21lLW5hbWUtcHhjIn0seyJuYW1lIjoiT1BFUkFUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp
7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiUFJPWFlfQURNSU5fVVNFUiIsInZhbHVlIjoicHJveHlhZG1pbiJ9LHsibmFtZSI6IlBST1hZX0FETUlOX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoicHJveHlhZG1pbiJ9fX0seyJuYW1lIjoiTU9OSVRPUl9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6Im1vbml0b3IifX19LHsibmFtZSI6IlNDSEVEVUxFUl9DSEVDS1RJTUVPVVQiLCJ2YWx1ZSI6IjIwMDAifSx7Im5hbWUiOiJTQ0hFRFVMRVJfV1JJVEVSQUxTT1JFQURFUiIsInZhbHVlIjoiMSJ9LHsibmFtZSI6IlNDSEVEVUxFUl9SRVRSWVVQIiwidmFsdWUiOiIxIn0seyJuYW1lIjoiU0NIRURVTEVSX1JFVFJZRE9XTiIsInZhbHVlIjoiMyJ9LHsibmFtZSI6IlNDSEVEVUxFUl9QSU5HVElNRU9VVCIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9OT0RFQ0hFQ0tJTlRFUlZBTCIsInZhbHVlIjoiMjAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9NQVhDT05ORUNUSU9OUyIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlBFUkNPTkFfU0NIRURVTEVSX0NGRyIsInZhbHVlIjoiL3RtcC9zY2hlZHVsZXItY29uZmlnLnRvbWwifV0sInJlc291cmNlcyI6eyJsaW1pdHMiOnsiY3B1IjoiNzAwbSIsIm1lbW9yeSI6IjFHIn0sInJlcXVlc3RzIjp7ImNwdSI6IjEwMG0iLCJtZW1vcnkiOiIxMDBNIn19LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJwcm94eWRh"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJjMDBlODlkMzAxNzRkNmFhN2FkODM4OGQ3ZTIxYTI3YiIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNvbS9zc2wtaW50ZXJuYWwtaGFzaCI6Ijc1MWU1OWExYzFkOTRhYzY3ZjlkNmJiNmU3NTZhY2RjIn19LCJzcGVjIjp7InZvbHVtZXMiOlt7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImNvbmZpZyIsImNvbmZpZ01hcCI6eyJuYW1lIjoic29tZS1uYW1lLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJiaW4iLCJlbXB0eURpciI6e319XSwiaW5pdENvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTI0MjctZDRmNzEyMWQiLCJjb21tYW5kIjpbIi9weGMtaW5pdC1lbnRyeXBvaW50LnNoIl0sInJlc291cmNlcyI6eyJsaW1pdHMiOnsiY3B1IjoiNTBtIiwibWVtb3J5IjoiNTBNIn19LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJiaW4iLCJtb3VudFBhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHJveHlzcWwtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTI0MjctZDRmNzEyMWQiLCJjb21tYW5kIjpbIi9wcm94eXNxbC1pbml0LWVudHJ5cG9pbnQuc2giXSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6eyJjcHUiOiI1MG0iLCJtZW1vcnkiOiI1ME0ifX0sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImJpbiIsIm1vdW50UGF0aCI6Ii9vcHQvcGVyY29uYSJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn1dLCJjb250YWluZXJzIjpbeyJuYW1lIjoicHJveHlzcWwiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXByb3h5c3FsIiwi
Y29tbWFuZCI6WyIvb3B0L3BlcmNvbmEvcHJveHlzcWwtZW50cnlwb2ludC5zaCJdLCJhcmdzIjpbInByb3h5c3FsIiwiLWYiLCItYyIsIi9ldGMvcHJveHlzcWwvcHJveHlzcWwuY25mIiwiLS1yZWxvYWQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InByb3h5YWRtIiwiY29udGFpbmVyUG9ydCI6NjAzMn0seyJuYW1lIjoic3RhdHMiLCJjb250YWluZXJQb3J0Ijo2MDcwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IlBYQ19TRVJWSUNFIiwidmFsdWUiOiJzb21lLW5hbWUtcHhjIn0seyJuYW1lIjoiT1BFUkFUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiUFJPWFlfQURNSU5fVVNFUiIsInZhbHVlIjoicHJveHlhZG1pbiJ9LHsibmFtZSI6IlBST1hZX0FETUlOX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoicHJveHlhZG1pbiJ9fX0seyJuYW1lIjoiTU9OSVRPUl9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6Im1vbml0b3IifX19LHsibmFtZSI6IlNDSEVEVUxFUl9DSEVDS1RJTUVPVVQiLCJ2YWx1ZSI6IjIwMDAifSx7Im5hbWUiOiJTQ0hFRFVMRVJfV1JJVEVSQUxTT1JFQURFUiIsInZhbHVlIjoiMSJ9LHsibmFtZSI6IlNDSEVEVUxFUl9SRVRSWVVQIiwidmFsdWUiOiIxIn0seyJuYW1lIjoiU0NIRURVTEVSX1JFVFJZRE9XTiIsInZhbHVlIjoiMyJ9LHsibmFtZSI6IlNDSEVEVUxFUl9QSU5HVElNRU9VVCIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9OT0RFQ0hFQ0tJTlRFUlZBTCIsInZhbHVlIjoiMjAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9NQVhDT05ORUNUSU9OUyIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlBFUkNPTkFfU0NIRURVTEVSX0NGRyIsInZhbHVlIjoiL3RtcC9zY2hlZHVsZXItY29uZmlnLnRvbWwifV0sInJlc291cmNlcyI6eyJsaW1pdHMiOnsiY3B1IjoiNzAwbSIsIm1lbW9yeSI6IjFHIn0sInJlcXVlc3RzIjp7ImNwdSI6IjEwMG0iLCJtZW1vcnkiOiIxMDBNIn19LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJwcm94eWRh"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJl"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJl"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJjMDBlODlkMzAxNzRkNmFhN2FkODM4OGQ3ZTIxYTI3YiIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNvbS9zc2wtaW50ZXJuYWwtaGFzaCI6Ijc1MWU1OWExYzFkOTRhYzY3ZjlkNmJiNmU3NTZhY2RjIn19LCJzcGVjIjp7InZvbHVtZXMiOlt7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImNvbmZpZyIsImNvbmZpZ01hcCI6eyJuYW1lIjoic29tZS1uYW1lLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJiaW4iLCJlbXB0eURpciI6e319XSwiaW5pdENvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTI0MjctZDRmNzEyMWQiLCJjb21tYW5kIjpbIi9weGMtaW5pdC1lbnRyeXBvaW50LnNoIl0sInJlc291cmNlcyI6eyJsaW1pdHMiOnsiY3B1IjoiNTBtIiwibWVtb3J5IjoiNTBNIn19LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJiaW4iLCJtb3VudFBhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHJveHlzcWwtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTI0MjctZDRmNzEyMWQiLCJjb21tYW5kIjpbIi9wcm94eXNxbC1pbml0LWVudHJ5cG9pbnQuc2giXSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6eyJjcHUiOiI1MG0iLCJtZW1vcnkiOiI1ME0ifX0sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImJpbiIsIm1vdW50UGF0aCI6Ii9vcHQvcGVyY29uYSJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn1dLCJjb250YWluZXJzIjpbeyJuYW1lIjoicHJveHlzcWwiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXByb3h5c3FsIiwiY29tbWFuZCI6WyIvb3B0L3BlcmNvbmEvcHJveHlzcWwtZW50cnlwb2ludC5zaCJdLCJhcmdzIjpbInByb3h5c3FsIiwiLWYiLCItYyIsIi9ldGMvcHJveHlzcWwvcHJveHlzcWwuY25mIiwiLS1yZWxvYWQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InByb3h5YWRtIiwiY29udGFpbmVyUG9ydCI6NjAzMn0seyJuYW1lIjoic3RhdHMiLCJjb250YWluZXJQb3J0Ijo2MDcwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IlBYQ19TRVJWSUNFIiwidmFsdWUiOiJzb21lLW5hbWUtcHhjIn0seyJuYW1lIjoiT1BFUkFUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiUFJPWFlfQURNSU5fVVNFUiIsInZhbHVlIjoicHJveHlhZG1pbiJ9LHsibmFtZSI6IlBST1hZX0FETUlOX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoicHJveHlhZG1pbiJ9fX0seyJuYW1lIjoiTU9OSVRPUl9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6Im1vbml0b3IifX19LHsibmFtZSI6IlNDSEVEVUxFUl9DSEVDS1RJTUVPVVQiLCJ2YWx1ZSI6IjIwMDAifSx7Im5hbWUiOiJTQ0hFRFVMRVJfV1JJVEVSQUxTT1JFQURFUiIsInZhbHVlIjoiMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9SRVRSWVVQIi
widmFsdWUiOiIxIn0seyJuYW1lIjoiU0NIRURVTEVSX1JFVFJZRE9XTiIsInZhbHVlIjoiMyJ9LHsibmFtZSI6IlNDSEVEVUxFUl9QSU5HVElNRU9VVCIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9OT0RFQ0hFQ0tJTlRFUlZBTCIsInZhbHVlIjoiMjAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9NQVhDT05ORUNUSU9OUyIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlBFUkNPTkFfU0NIRURVTEVSX0NGRyIsInZhbHVlIjoiL3RtcC9zY2hlZHVsZXItY29uZmlnLnRvbWwifSx7Im5hbWUiOiJTQ0hFRFVMRVJfRU5BQkxFRCIsInZhbHVlIjoidHJ1ZSJ9XSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6eyJjcHUiOiI3MDBtIiwibWVtb3J5IjoiMUcifSwicmVxdWVzdHMiOnsiY3B1IjoiMTAwbSIsIm1lbW9y"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJjMDBlODlkMzAxNzRkNmFhN2FkODM4OGQ3ZTIxYTI3YiIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNvbS9zc2wtaW50ZXJuYWwtaGFzaCI6Ijc1MWU1OWExYzFkOTRhYzY3ZjlkNmJiNmU3NTZhY2RjIn19LCJzcGVjIjp7InZvbHVtZXMiOlt7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImNvbmZpZyIsImNvbmZpZ01hcCI6eyJuYW1lIjoic29tZS1uYW1lLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJiaW4iLCJlbXB0eURpciI6e319XSwiaW5pdENvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTI0MjctZDRmNzEyMWQiLCJjb21tYW5kIjpbIi9weGMtaW5pdC1lbnRyeXBvaW50LnNoIl0sInJlc291cmNlcyI6eyJsaW1pdHMiOnsiY3B1IjoiNTBtIiwibWVtb3J5IjoiNTBNIn19LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJiaW4iLCJtb3VudFBhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHJveHlzcWwtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTI0MjctZDRmNzEyMWQiLCJjb21tYW5kIjpbIi9wcm94eXNxbC1pbml0LWVudHJ5cG9pbnQuc2giXSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6eyJjcHUiOiI1MG0iLCJtZW1vcnkiOiI1ME0ifX0sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImJpbiIsIm1vdW50UGF0aCI6Ii9vcHQvcGVyY29uYSJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn1dLCJjb250YWluZXJzIjpbeyJuYW1lIjoicHJveHlzcWwiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXByb3h5c3FsIiwiY29tbWFuZCI6WyIvb3B0L3BlcmNvbmEvcHJveHlzcWwtZW50cnlwb2ludC5zaCJdLCJhcmdzIjpbInByb3h5c3FsIiwiLWYiLCItYyIsIi9ldGMvcHJveHlzcWwvcHJveHlzcWwuY25mIiwiLS1yZWxvYWQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InByb3h5YWRtIiwiY29udGFpbmVyUG9ydCI6NjAzMn0seyJuYW1lIjoic3RhdHMiLCJjb250YWluZXJQb3J0Ijo2MDcwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IlBYQ19TRVJWSUNFIiwidmFsdWUiOiJzb21lLW5hbWUtcHhjIn0seyJuYW1lIjoiT1BFUkFUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp
7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiUFJPWFlfQURNSU5fVVNFUiIsInZhbHVlIjoicHJveHlhZG1pbiJ9LHsibmFtZSI6IlBST1hZX0FETUlOX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoicHJveHlhZG1pbiJ9fX0seyJuYW1lIjoiTU9OSVRPUl9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6Im1vbml0b3IifX19LHsibmFtZSI6IlNDSEVEVUxFUl9DSEVDS1RJTUVPVVQiLCJ2YWx1ZSI6IjIwMDAifSx7Im5hbWUiOiJTQ0hFRFVMRVJfV1JJVEVSQUxTT1JFQURFUiIsInZhbHVlIjoiMSJ9LHsibmFtZSI6IlNDSEVEVUxFUl9SRVRSWVVQIiwidmFsdWUiOiIxIn0seyJuYW1lIjoiU0NIRURVTEVSX1JFVFJZRE9XTiIsInZhbHVlIjoiMyJ9LHsibmFtZSI6IlNDSEVEVUxFUl9QSU5HVElNRU9VVCIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9OT0RFQ0hFQ0tJTlRFUlZBTCIsInZhbHVlIjoiMjAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9NQVhDT05ORUNUSU9OUyIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlBFUkNPTkFfU0NIRURVTEVSX0NGRyIsInZhbHVlIjoiL3RtcC9zY2hlZHVsZXItY29uZmlnLnRvbWwifSx7Im5hbWUiOiJTQ0hFRFVMRVJfRU5BQkxFRCIsInZhbHVlIjoidHJ1ZSJ9XSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6eyJjcHUiOiI3MDBtIiwibWVtb3J5IjoiMUcifSwicmVxdWVzdHMiOnsiY3B1IjoiMTAwbSIsIm1lbW9y"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6NSwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJl"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6NSwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJl"..., +  PeriodSeconds: 0, -  PeriodSeconds: 10, +  PersistentVolumeClaimRetentionPolicy: nil, -  PersistentVolumeClaimRetentionPolicy: s"&StatefulSetPersistentVolumeClaimRetentionPolicy{WhenDeleted:Retain,WhenScaled:Retain,}", +  Phase: "", -  Phase: "Pending", +  PodManagementPolicy: "", -  PodManagementPolicy: "OrderedReady", +  Protocol: "", -  Protocol: "TCP", +  ReadyReplicas: 0, -  ReadyReplicas: 2, -  ReadyReplicas: 3, -  ReadyReplicas: 5, +  Replicas: 0, -  Replicas: 2, -  Replicas: &2, -  Replicas: 3, -  Replicas: &3, +  Replicas: &3, -  Replicas: 5, -  Replicas: &5, +  Replicas: &5, +  ResourceVersion: "", -  ResourceVersion: "1775653387493759009", -  ResourceVersion: "1775653840813199009", -  ResourceVersion: "1775653997074495009", -  ResourceVersion: "1775654127672895008", -  ResourceVersion: "1775654300854575008", -  ResourceVersion: "1775654451488943009", +  RestartPolicy: "", -  RestartPolicy: "Always", -  RevisionHistoryLimit: &10, +  RevisionHistoryLimit: nil, +  SchedulerName: "", -  SchedulerName: "default-scheduler", -  "-service=$(PROXYSQL_SERVICE)", "-protocol=$(PEER_LIST_SRV_PROTOCOL)", +  "-service=$(PROXYSQL_SERVICE)", "-protocol=$(PEER_LIST_SRV_PROTOCOL)", -  Subresource: "status", +  TerminationMessagePath: "", -  TerminationMessagePath: "/dev/termination-log", +  TerminationMessagePolicy: "", -  TerminationMessagePolicy: "File", -  Time: s"2026-04-08 13:02:19 +0000 UTC", -  Time: s"2026-04-08 13:03:07 +0000 UTC", -  Time: s"2026-04-08 13:10:15 +0000 UTC", -  Time: s"2026-04-08 13:10:40 +0000 UTC", -  Time: s"2026-04-08 13:12:48 +0000 UTC", -  Time: s"2026-04-08 13:13:17 +0000 UTC", -  Time: s"2026-04-08 13:15:27 +0000 UTC", -  Time: s"2026-04-08 13:15:37 +0000 UTC", -  Time: s"2026-04-08 13:18:20 +0000 UTC", -  Time: s"2026-04-08 13:20:29 +0000 UTC", -  Time: s"2026-04-08 
13:20:51 +0000 UTC", -  TopologySpreadConstraints: nil, +  TopologySpreadConstraints: []v1.TopologySpreadConstraint{}, +  UID: "", -  UID: "6c1480d1-d081-4b2e-8a47-de8f3803a6fd", -  UID: "eb763b3e-0656-4bb0-a9cb-87e8e8bca322", +  UpdatedReplicas: 0, -  UpdatedReplicas: 2, -  UpdatedReplicas: 3, -  UpdatedReplicas: 5, +  UpdateRevision: "", -  UpdateRevision: "some-name-proxysql-7bd48b5fdb", -  UpdateRevision: "some-name-proxysql-85fb99667b", -  UpdateRevision: "some-name-pxc-66bcbddfc9", +  Value: "0", -  Value: "1", -  VolumeMode: &"Filesystem", +  VolumeMode: nil, -  VolumeMounts: []v1.VolumeMount{{Name: "bin", MountPath: "/opt/percona"}}, +  VolumeMounts: []v1.VolumeMount{{Name: "bin", MountPath: "/opt/percona"}},   }    },    },    {    },    },    {    },    },    {    },    ... // 11 identical elements    ... // 16 identical fields    ... // 16 identical fields    ... // 22 identical fields    ... // 2 identical fields    ... // 3 identical fields    ... // 3 identical fields    ... // 3 identical fields    ... // 4 identical elements    ... // 4 identical fields    ... // 5 identical elements    ... // 5 identical fields    ... // 5 identical fields    ... // 6 identical fields    ... // 6 identical fields    ... // 7 identical fields    ... // 8 identical fields    ... // 9 identical fields    AccessModes: nil,    ActiveDeadlineSeconds: nil,    Affinity: nil,    Args: {"mysqld"},    Args: {"proxysql", "-f", "-c", "/etc/proxysql/proxysql.cnf", ...},    AutomountServiceAccountToken: nil,    AWSElasticBlockStore: nil,    AzureFile: nil,    Capacity: nil,    Conditions: nil,    ConfigMap: &v1.ConfigMapVolumeSource{    ContainerPort: 3306,    ContainerPort: 33060,    ContainerPort: 33062,    ContainerPort: 4444,    ContainerPort: 4567,    ContainerPort: 4568,    ContainerPort: 6032,    ContainerPort: 6070,    Containers: []v1.Container{    DataSource: nil,    DataSourceRef: nil,    DeletionGracePeriodSeconds: nil,    DeletionTimestamp: nil,    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-proxysql"}, Optional: &true}}},    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-pxc"}, Optional: &true}}},    Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc"}, {Name: "OPERATOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}}, {Name: "PROXY_ADMIN_USER", Value: "proxyadmin"}, {Name: "PROXY_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "proxyadmin"}}}, ...},    Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc-unready"}, {Name: "MONITOR_HOST", Value: "%"}, {Name: "MYSQL_ROOT_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "root"}}}, {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}}, ...},    Env: []v1.EnvVar{    EphemeralContainers: nil,    FailureThreshold: 3,    FC: nil,    Finalizers: nil,    GitRepo: nil,    HostAliases: nil,    HostIP: "",    HostPort: 0,    ImagePullPolicy: "Always",    InitContainers: []v1.Container{    InitialDelaySeconds: 300,    ISCSI: nil,    Items: nil,    Items: nil,    Labels: nil,    Lifecycle: nil,    LivenessProbe: &v1.Probe{    LocalObjectReference: {Name: "auto-some-name-pxc"},    LocalObjectReference: {Name: "some-name-proxysql"},    LocalObjectReference: {Name: "some-name-pxc"},    MinReadySeconds: 0,    Name: "auto-config",    {Name: "bin", VolumeSource: 
{EmptyDir: &{}}},    Name: "config",    Name: "ist",    {Name: "MONITOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "monitor"}}},    Name: "mysql",    Name: "mysql-admin",    Name: "mysql-init-file",    Name: "mysql-users-secret-file",    Name: "mysqlx",    {Name: "PERCONA_SCHEDULER_CFG", Value: "/tmp/scheduler-config.toml"},    Name: "proxyadm",    {Name: "SCHEDULER_CHECKTIMEOUT", Value: "2000"},    {Name: "SCHEDULER_MAXCONNECTIONS", Value: "1000"},    {Name: "SCHEDULER_RETRYDOWN", Value: "3"},    {Name: "SCHEDULER_RETRYUP", Value: "1"},    Name: "SCHEDULER_WRITERALSOREADER",    Namespace: "proxysql-scheduler-16506",    Name: "ssl",    Name: "ssl-internal",    Name: "sst",    Name: "stats",    {Name: "tmp", VolumeSource: {EmptyDir: &{}}},    Name: "vault-keyring-secret",    Name: "write-set",    NFS: nil,    NodeName: "",    NodeSelector: nil,    ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "percona.com/configuration-hash": "c00e89d30174d6aa7ad8388d7e21a27b", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc"}},    ObjectMeta: {Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "pxc", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc"}},    ObjectMeta: {Name: "datadir", Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    ObjectMeta: {Name: "proxydata", Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    ObjectMeta: v1.ObjectMeta{    Optional: &false,    Optional: &true,    Optional: &true,    Ordinals: nil,    OS: nil,    Overhead: nil,    OwnerReferences: {{APIVersion: "pxc.percona.com/v1", Kind: "PerconaXtraDBCluster", Name: "some-name", UID: "5530a59b-6cbf-4d39-9a1d-2ee442bd2e2f", ...}},    Ports: nil,    Ports: []v1.ContainerPort{    PreemptionPolicy: nil,    ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/liveness-check.sh"}}},    Quobyte: nil,    ReadinessProbe: &{ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/readiness-check.sh"}}}, InitialDelaySeconds: 15, TimeoutSeconds: 15, PeriodSeconds: 30, ...},    Replicas: &2,    Replicas: &3,    ResizePolicy: nil,    Resources: {},    Resources: {Limits: {s"cpu": {i: {...}, s: "700m", Format: "DecimalSI"}, s"memory": {i: {...}, s: "1G", Format: "DecimalSI"}}, Requests: {s"cpu": {i: {...}, s: "100m", Format: "DecimalSI"}, s"memory": {i: {...}, Format: "DecimalSI"}}},    SecretName: "internal-some-name",    SecretName: "some-name-mysql-init",    SecretName: "some-name-ssl",    SecretName: "some-name-ssl-internal",    SecretName: "some-name-vault",    Secret: 
&v1.SecretVolumeSource{    SecurityContext: nil,    Selector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    Selector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    SelfLink: "",    ServiceAccountName: "default",    ServiceName: "some-name-proxysql-unready",    ServiceName: "some-name-pxc",    SetHostnameAsFQDN: nil,    Spec: v1.PersistentVolumeClaimSpec{    Spec: v1.PodSpec{    Spec: v1.StatefulSetSpec{    StartupProbe: nil,    Status: v1.PersistentVolumeClaimStatus{    Status: v1.StatefulSetStatus{    StorageClassName: nil,    Subdomain: "",    SuccessThreshold: 1,    Template: v1.PodTemplateSpec{    TerminationGracePeriodSeconds: &30,    TerminationGracePeriodSeconds: &600,    TerminationGracePeriodSeconds: nil,    TimeoutSeconds: 5,    Tolerations: nil,    TypeMeta: {},    TypeMeta: {Kind: "StatefulSet", APIVersion: "apps/v1"},    UpdateStrategy: {Type: "RollingUpdate", RollingUpdate: &{Partition: &0}},   &v1.StatefulSet{    ValueFrom: nil,    VolumeAttributesClassName: nil,    VolumeClaimTemplates: []v1.PersistentVolumeClaim{    VolumeDevices: nil,    VolumeMounts: {{Name: "datadir", MountPath: "/var/lib/mysql"}, {Name: "config", MountPath: "/etc/percona-xtradb-cluster.conf.d"}, {Name: "tmp", MountPath: "/tmp"}, {Name: "ssl", MountPath: "/etc/mysql/ssl"}, ...},    VolumeName: "",    VolumeSource: v1.VolumeSource{    Volumes: []v1.Volume{    VsphereVolume: nil,    WorkingDir: "", + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n proxysql-scheduler-16506 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.K2bc9IkHSa ++ mktemp + local LAST_ERR=/tmp/tmp.ngBg9CGKXi + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.K2bc9IkHSa perconaxtradbcluster.pxc.percona.com "some-name" deleted from proxysql-scheduler-16506 namespace + cat /tmp/tmp.ngBg9CGKXi + rm /tmp/tmp.K2bc9IkHSa /tmp/tmp.ngBg9CGKXi + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.QpDrslPqi3 ++ mktemp + local LAST_ERR=/tmp/tmp.kaQcHNsyT4 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.QpDrslPqi3 No resources found + cat /tmp/tmp.kaQcHNsyT4 + rm /tmp/tmp.QpDrslPqi3 /tmp/tmp.kaQcHNsyT4 + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.sNCuhFiiHs ++ mktemp + local LAST_ERR=/tmp/tmp.lkMfzmXHDa + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.sNCuhFiiHs No resources found + cat /tmp/tmp.lkMfzmXHDa + rm /tmp/tmp.sNCuhFiiHs /tmp/tmp.lkMfzmXHDa + return 0 + kubectl_bin delete ValidatingWebhookConfiguration 
percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.HHMgw8fh2s ++ mktemp + local LAST_ERR=/tmp/tmp.PJTAPe3g8N + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.HHMgw8fh2s validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.PJTAPe3g8N + rm /tmp/tmp.HHMgw8fh2s /tmp/tmp.PJTAPe3g8N + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.19.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + rm -rf /tmp/tmp.IHyuq6Rrjq + kubectl_bin delete --grace-period=0 --force=true namespace proxysql-scheduler-16506 + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.xWzgrFtoEb ++ mktemp + local LAST_OUT=/tmp/tmp.LhiHiq7VWn ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.uuJb8AwZBU + local exit_status=0 + local LAST_ERR=/tmp/tmp.ndcX20z8Bn + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace proxysql-scheduler-16506 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator