Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/logs/proxysql-scheduler-8-0.log Warning: version difference between client (1.35) and server (1.31) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.35) and server (1.31) exceeds the supported minor version skew of +/-1 + cluster=some-name + create_infra proxysql-scheduler-7615 + local ns=proxysql-scheduler-7615 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n proxysql-scheduler-2668 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.uBLAanz5al ++ mktemp + local LAST_ERR=/tmp/tmp.2ge9m9SKzG + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.uBLAanz5al perconaxtradbcluster.pxc.percona.com "some-name" deleted from proxysql-scheduler-2668 namespace + cat /tmp/tmp.2ge9m9SKzG + rm /tmp/tmp.uBLAanz5al /tmp/tmp.2ge9m9SKzG + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.ZlVGiYTey9 ++ mktemp + local LAST_ERR=/tmp/tmp.EiXKloMDKO + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ZlVGiYTey9 No resources found + cat /tmp/tmp.EiXKloMDKO + rm /tmp/tmp.ZlVGiYTey9 /tmp/tmp.EiXKloMDKO + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.T23gi3Sfie ++ mktemp + local LAST_ERR=/tmp/tmp.L55bLWB2LF + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.T23gi3Sfie No resources found + cat /tmp/tmp.L55bLWB2LF + rm /tmp/tmp.T23gi3Sfie /tmp/tmp.L55bLWB2LF + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ awk '-F ' '{print $2}' ++ tail -n1 ++ sed s/NAMESPACE// ++ helm list --all-namespaces --filter chaos-mesh + local chaos_mesh_ns= + '[' -n '' ']' ++ grep chaos-mesh ++ kubectl get MutatingWebhookConfiguration ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get ValidatingWebhookConfiguration + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ grep chaos-mesh ++ kubectl api-resources ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk 
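# The recurring "mktemp / seq 0 2 / set +e ... break" pattern above is the harness's
# kubectl_bin retry wrapper: it captures stdout and stderr into temp files, retries the
# kubectl call up to three times, then prints and removes the captured output. A minimal
# sketch of that logic as reconstructed from this trace (the real helper in
# e2e-tests/functions may differ in details):
kubectl_bin() {
	local LAST_OUT LAST_ERR exit_status=0
	LAST_OUT=$(mktemp)
	LAST_ERR=$(mktemp)
	for i in $(seq 0 2); do
		set +e
		kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
		exit_status=$?
		set -e
		if [ "$exit_status" != 0 ]; then
			sleep 0    # the trace shows no real back-off between attempts
		else
			break
		fi
	done
	cat "$LAST_OUT"
	cat "$LAST_ERR"
	rm "$LAST_OUT" "$LAST_ERR"
	return $exit_status
}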
'{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator + kubectl_bin get ns ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.oCo5qjOwMw + local LAST_OUT=/tmp/tmp.m7E5L7uKUb ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.q0cgO6PknL + local exit_status=0 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.mKiTa7xDrU + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.oCo5qjOwMw + cat /tmp/tmp.q0cgO6PknL + rm /tmp/tmp.oCo5qjOwMw /tmp/tmp.q0cgO6PknL + return 0 namespace "proxysql-scheduler-2668" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.m7E5L7uKUb namespace "pxc-operator" deleted + cat /tmp/tmp.mKiTa7xDrU + rm /tmp/tmp.m7E5L7uKUb /tmp/tmp.mKiTa7xDrU + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'waiting for namespace/pxc-operator to be deleted' waiting for namespace/pxc-operator to be deleted+ set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.vz5r6okscT ++ mktemp + local LAST_ERR=/tmp/tmp.svbkEbj0kv + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.vz5r6okscT namespace/pxc-operator created + cat /tmp/tmp.svbkEbj0kv + rm /tmp/tmp.vz5r6okscT /tmp/tmp.svbkEbj0kv + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.6RMCUoWu2W +++ mktemp ++ local LAST_ERR=/tmp/tmp.xxPaM6nwHl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6RMCUoWu2W ++ cat /tmp/tmp.xxPaM6nwHl ++ rm /tmp/tmp.6RMCUoWu2W /tmp/tmp.xxPaM6nwHl ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2330-8293f071-6-cluster8 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.uopyenmMXK ++ mktemp + local LAST_ERR=/tmp/tmp.NaQcavw1Ye + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2330-8293f071-6-cluster8 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.uopyenmMXK Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2330-8293f071-6-cluster8" 
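# The repeated "error: resource(s) were provided, but no name was specified" messages are
# expected: destroy_chaos_mesh greps for chaos-mesh webhooks, CRDs and RBAC objects, finds
# none on this cluster, so each "kubectl delete" receives an empty name list; the ":" no-op
# right after each failure swallows the non-zero exit. The old-namespace sweep that follows
# uses the same tolerant pattern; the pipeline as it appears in the trace:
kubectl get ns \
	| grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' \
	| awk '{print $1}' \
	| xargs kubectl delete ns    # deletes whatever leftover test namespaces are listed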
modified. + cat /tmp/tmp.NaQcavw1Ye + rm /tmp/tmp.uopyenmMXK /tmp/tmp.NaQcavw1Ye + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.KL6U9e4L5H ++ mktemp + local LAST_ERR=/tmp/tmp.C0IninUuhS + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.KL6U9e4L5H customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.C0IninUuhS + rm /tmp/tmp.KL6U9e4L5H /tmp/tmp.C0IninUuhS + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/deploy/cw-rbac.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.LyupnjUBZF ++ mktemp + local LAST_ERR=/tmp/tmp.XS1s08i4iJ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LyupnjUBZF clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.XS1s08i4iJ + rm /tmp/tmp.LyupnjUBZF /tmp/tmp.XS1s08i4iJ + return 0 + kubectl_bin apply -f - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "PXCO_FEATURE_GATES").value) = ""' - + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2330-8293f071^' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/deploy/cw-operator.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.g5xzwGIhNO ++ mktemp + local LAST_ERR=/tmp/tmp.CxtNQLOvvs + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.g5xzwGIhNO deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.CxtNQLOvvs + rm /tmp/tmp.g5xzwGIhNO /tmp/tmp.CxtNQLOvvs + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ 
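# deploy_operator applies deploy/crd.yaml and deploy/cw-rbac.yaml, then rewrites
# deploy/cw-operator.yaml on the fly before applying it: yq sets LOG_LEVEL=VERBOSE and
# DISABLE_TELEMETRY=true on the operator container, and sed swaps in the PR image and a
# higher failureThreshold. A condensed sketch of that pipeline, with the expressions copied
# from the trace above:
cat deploy/cw-operator.yaml \
	| yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - \
	| yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - \
	| sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2330-8293f071^' \
	| sed -e 's^failureThreshold: .*^failureThreshold: 10^' \
	| kubectl apply -f -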
mktemp + local LAST_OUT=/tmp/tmp.KvwlGkIo3k ++ mktemp + local LAST_ERR=/tmp/tmp.RPhd7moBYt + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.KvwlGkIo3k pod/percona-xtradb-cluster-operator-65b6f89d5d-hztxk condition met + cat /tmp/tmp.RPhd7moBYt + rm /tmp/tmp.KvwlGkIo3k /tmp/tmp.RPhd7moBYt + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ grep -c percona-xtradb-cluster-operator +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ head -1 ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.ImQYWyikE8 ++ jq -r '.items[] | select(.metadata.deletionTimestamp == null) | .metadata.name' +++ mktemp ++ local LAST_ERR=/tmp/tmp.8HOMgh1xVt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ImQYWyikE8 ++ cat /tmp/tmp.8HOMgh1xVt ++ rm /tmp/tmp.ImQYWyikE8 /tmp/tmp.8HOMgh1xVt ++ return 0 + wait_pod percona-xtradb-cluster-operator-65b6f89d5d-hztxk 480 pxc-operator + local pod=percona-xtradb-cluster-operator-65b6f89d5d-hztxk + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-65b6f89d5d-hztxk ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-65b6f89d5d-hztxk condition met waiting for pod/percona-xtradb-cluster-operator-65b6f89d5d-hztxk to become Ready.Ok + sleep 3 + create_namespace proxysql-scheduler-7615 + local namespace=proxysql-scheduler-7615 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ grep chaos-mesh ++ kubectl get MutatingWebhookConfiguration ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ kubectl api-resources ++ grep chaos-mesh ++ grep chaos-mesh.org ++ awk '{print $1}' ++ kubectl get crd + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: 
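# get_operator_pod, traced above, picks the first Running, non-terminating operator pod in
# the pxc-operator namespace; condensed from the trace:
kubectl get pods \
	--selector=app.kubernetes.io/name=percona-xtradb-cluster-operator \
	--field-selector=status.phase=Running -o json -n pxc-operator \
	| jq -r '.items[] | select(.metadata.deletionTimestamp == null) | .metadata.name' \
	| head -1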
resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + '[' -n '' ']' + desc 'cleaned up old namespaces proxysql-scheduler-7615' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces proxysql-scheduler-7615 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace proxysql-scheduler-7615 + xargs kubectl delete ns ++ mktemp + awk '{print$1}' + grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + kubectl_bin get ns + local LAST_OUT=/tmp/tmp.CLqaqlgaln ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.sCSrjufelC ++ mktemp + local LAST_ERR=/tmp/tmp.JNX7y38kfA + local exit_status=0 + local LAST_ERR=/tmp/tmp.aBfgyFjwDO + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace proxysql-scheduler-7615 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.sCSrjufelC + cat /tmp/tmp.aBfgyFjwDO + rm /tmp/tmp.sCSrjufelC /tmp/tmp.aBfgyFjwDO + return 0 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace proxysql-scheduler-7615 error: resource(s) were provided, but no name was specified + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace proxysql-scheduler-7615 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.CLqaqlgaln + cat /tmp/tmp.JNX7y38kfA Error from server (NotFound): namespaces "proxysql-scheduler-7615" not found + rm /tmp/tmp.CLqaqlgaln /tmp/tmp.JNX7y38kfA + return 1 + : + wait_for_delete namespace/proxysql-scheduler-7615 + local res=namespace/proxysql-scheduler-7615 + echo -n 'waiting for namespace/proxysql-scheduler-7615 to be deleted' waiting for namespace/proxysql-scheduler-7615 to be deleted+ set +o xtrace Error from server (NotFound): namespaces "proxysql-scheduler-7615" not found + desc 'create namespace proxysql-scheduler-7615' + set +o xtrace ----------------------------------------------------------------------------------- create namespace proxysql-scheduler-7615 ----------------------------------------------------------------------------------- + kubectl_bin create namespace proxysql-scheduler-7615 ++ mktemp + local LAST_OUT=/tmp/tmp.EEzeEagQBs ++ mktemp + local LAST_ERR=/tmp/tmp.2Z2H7PuIqZ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace proxysql-scheduler-7615 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.EEzeEagQBs namespace/proxysql-scheduler-7615 created + cat /tmp/tmp.2Z2H7PuIqZ + rm /tmp/tmp.EEzeEagQBs /tmp/tmp.2Z2H7PuIqZ + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.SSEhqTnUa2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.rYKr5ToBLB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SSEhqTnUa2 ++ cat /tmp/tmp.rYKr5ToBLB ++ rm /tmp/tmp.SSEhqTnUa2 /tmp/tmp.rYKr5ToBLB ++ return 0 + kubectl_bin config set-context 
gke_cloud-dev-112233_us-central1-a_jen-pxc-2330-8293f071-6-cluster8 --namespace=proxysql-scheduler-7615 ++ mktemp + local LAST_OUT=/tmp/tmp.E7pfhwWxMk ++ mktemp + local LAST_ERR=/tmp/tmp.l902xGAff0 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2330-8293f071-6-cluster8 --namespace=proxysql-scheduler-7615 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.E7pfhwWxMk Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2330-8293f071-6-cluster8" modified. + cat /tmp/tmp.l902xGAff0 + rm /tmp/tmp.E7pfhwWxMk /tmp/tmp.l902xGAff0 + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.CR7diAfOFr ++ mktemp + local LAST_ERR=/tmp/tmp.DUn2jzvqig + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.CR7diAfOFr secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.DUn2jzvqig + rm /tmp/tmp.CR7diAfOFr /tmp/tmp.DUn2jzvqig + return 0 + desc 'create PXC cluster: some-name' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster: some-name ----------------------------------------------------------------------------------- + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.2zXbwoy5on ++ mktemp + local LAST_ERR=/tmp/tmp.TFckxqncht + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.2zXbwoy5on secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.TFckxqncht + rm /tmp/tmp.2zXbwoy5on /tmp/tmp.TFckxqncht + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/conf/client.yml + local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/conf/client.yml + 
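# apply_config, traced just below, pipes each manifest through cat_config, a chain of sed
# substitutions that pins every image reference (pxc, proxysql, haproxy, backup,
# logcollector, pmm, init) to the images under test and normalizes the apiVersion before
# "kubectl apply -f -". A condensed sketch with two of the substitutions (full Jenkins
# paths shortened):
cat e2e-tests/conf/client.yml \
	| sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
	| sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' \
	| kubectl apply -f -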
local pvc_name= + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/conf/client.yml '' + kubectl_bin apply -f - + local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/conf/client.yml + local pvc_name= ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + local LAST_OUT=/tmp/tmp.u4n3o1b1xv + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.proxysql-scheduler-7615~ + /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2330-8293f071#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' ++ mktemp + local LAST_ERR=/tmp/tmp.u7cpqfxsCi + local exit_status=0 + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.u4n3o1b1xv deployment.apps/pxc-client created + cat /tmp/tmp.u7cpqfxsCi + rm /tmp/tmp.u4n3o1b1xv /tmp/tmp.u7cpqfxsCi + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/conf/some-name.yml + local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/conf/some-name.yml + local pvc_name= + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/conf/some-name.yml '' + local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/conf/some-name.yml + local pvc_name= ++ mktemp + local LAST_OUT=/tmp/tmp.32ezNseR9a + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.proxysql-scheduler-7615~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2330-8293f071#' ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: 
pxc.percona.com/v1#' + local LAST_ERR=/tmp/tmp.90d4EWEsHo + local exit_status=0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/conf/some-name.yml + /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.32ezNseR9a perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.90d4EWEsHo + rm /tmp/tmp.32ezNseR9a /tmp/tmp.90d4EWEsHo + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.uibUMgNatn ++++ mktemp +++ local LAST_ERR=/tmp/tmp.Xso4MwUXJf +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.uibUMgNatn +++ cat /tmp/tmp.Xso4MwUXJf +++ rm /tmp/tmp.uibUMgNatn /tmp/tmp.Xso4MwUXJf +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.hnmVcNkDJT ++++ mktemp +++ local LAST_ERR=/tmp/tmp.2hXEZ4oIAN +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.hnmVcNkDJT +++ cat /tmp/tmp.2hXEZ4oIAN +++ rm /tmp/tmp.hnmVcNkDJT /tmp/tmp.2hXEZ4oIAN +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n proxysql-scheduler-7615 ++ mktemp + local LAST_OUT=/tmp/tmp.Qv5CNI9xPq ++ mktemp + local LAST_ERR=/tmp/tmp.IukyaIU3SU + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n proxysql-scheduler-7615 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n proxysql-scheduler-7615 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n proxysql-scheduler-7615 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.Qv5CNI9xPq + cat /tmp/tmp.IukyaIU3SU error: no matching resources found + rm /tmp/tmp.Qv5CNI9xPq /tmp/tmp.IukyaIU3SU + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster 
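# Two things worth noting in the block above. First, the three failed "kubectl wait ...
# app.kubernetes.io/instance=monitoring" attempts and the final "error: no matching
# resources found" are expected: this test deploys no PMM pods, and the trailing "+ true"
# makes the failure non-fatal. Second, get_proxy decides which endpoint the test talks to
# by reading the CR spec; a sketch of that decision (the haproxy branch is not exercised
# in this run, so its echo is an assumption):
get_proxy() {
	local cluster=$1
	if [ "$(kubectl get pxc "$cluster" -o 'jsonpath={.spec.haproxy.enabled}')" == "true" ]; then
		echo "$cluster-haproxy"     # assumed; not hit in this log
	elif [ "$(kubectl get pxc "$cluster" -o 'jsonpath={.spec.proxysql.enabled}')" == "true" ]; then
		echo "$cluster-proxysql"    # the branch taken here
	fi
}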
----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met waiting for pod/some-name-proxysql-0 to become Ready.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met waiting for pod/some-name-pxc-0 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met waiting for pod/some-name-pxc-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met waiting for pod/some-name-pxc-2 to become Ready.Ok + sleep 10 ++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}' + local secret_name=my-cluster-secrets ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ base64 --decode ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AWyAwCF5eI +++ mktemp ++ local LAST_ERR=/tmp/tmp.XRw2bybbTB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AWyAwCF5eI ++ cat /tmp/tmp.XRw2bybbTB ++ rm /tmp/tmp.AWyAwCF5eI /tmp/tmp.XRw2bybbTB ++ return 0 + local root_pass=root_password + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -p'\''root_password'\'' -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GgfrqFzzOm +++ mktemp ++ local LAST_ERR=/tmp/tmp.nQtRlk5RDG ++ local exit_status=0 +++ seq 0 2 ++ for i in 
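# wait_pod derives which container's readiness to watch from the pod name itself: the sed
# strips everything up to "-pxc-<digit>" or "-proxysql-<digit>", and the grep keeps the
# result only if it is exactly "pxc" or "proxysql" (client and operator pods fall through
# to an empty container name). The root password is then read straight from the Secret.
# Both snippets as seen in the trace:
container=$(echo some-name-proxysql-0 | sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' | grep -E '^(pxc|proxysql)$')   # -> proxysql
root_pass=$(kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' | base64 --decode)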
'$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GgfrqFzzOm ++ cat /tmp/tmp.nQtRlk5RDG ++ rm /tmp/tmp.GgfrqFzzOm /tmp/tmp.nQtRlk5RDG ++ return 0 + client_pod=pxc-client-c75dc5c46-ws4bl + wait_pod pxc-client-c75dc5c46-ws4bl + local pod=pxc-client-c75dc5c46-ws4bl + local max_retry=480 + local ns= ++ grep -E '^(pxc|proxysql)$' ++ echo pxc-client-c75dc5c46-ws4bl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-c75dc5c46-ws4bl condition met waiting for pod/pxc-client-c75dc5c46-ws4bl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -p'\''root_password'\'' -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vhQvWCdqT9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.PTeakytxVP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vhQvWCdqT9 ++ cat /tmp/tmp.PTeakytxVP ++ rm /tmp/tmp.vhQvWCdqT9 /tmp/tmp.PTeakytxVP ++ return 0 + client_pod=pxc-client-c75dc5c46-ws4bl + wait_pod pxc-client-c75dc5c46-ws4bl + local pod=pxc-client-c75dc5c46-ws4bl + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' ++ echo pxc-client-c75dc5c46-ws4bl + local container= + set +o xtrace pod/pxc-client-c75dc5c46-ws4bl condition met waiting for pod/pxc-client-c75dc5c46-ws4bl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1Q684xDhSp +++ mktemp ++ local LAST_ERR=/tmp/tmp.DrV4cDYbgF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1Q684xDhSp ++ cat 
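# The mysql statements themselves are hidden by "set +o xtrace"; only the arguments are
# visible, and the 'Defaulted container "pxc-client" out of: pxc-client, backup' lines
# suggest the hidden step is a kubectl exec into the pxc-client pod without -c. A
# hypothetical reconstruction of run_mysql under that assumption (the exact flags are not
# shown in this log):
run_mysql() {
	local command=$1 uri=$2
	local client_pod
	client_pod=$(kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}')
	kubectl exec "$client_pod" -- bash -c "printf '%s\n' \"$command\" | mysql -sN $uri"
}
# e.g. run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' "-h some-name-proxysql -uroot -p'root_password' -P3306"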
/tmp/tmp.DrV4cDYbgF ++ rm /tmp/tmp.1Q684xDhSp /tmp/tmp.DrV4cDYbgF ++ return 0 + client_pod=pxc-client-c75dc5c46-ws4bl + wait_pod pxc-client-c75dc5c46-ws4bl + local pod=pxc-client-c75dc5c46-ws4bl + local max_retry=480 + local ns= ++ echo pxc-client-c75dc5c46-ws4bl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-c75dc5c46-ws4bl condition met waiting for pod/pxc-client-c75dc5c46-ws4bl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.qG7R1JVFpB/select-1.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.qG7R1JVFpB/select-1.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/select-1.sql /tmp/tmp.qG7R1JVFpB/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BN7px7POUV +++ mktemp ++ local LAST_ERR=/tmp/tmp.aLRDQIbEVi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BN7px7POUV ++ cat /tmp/tmp.aLRDQIbEVi ++ rm /tmp/tmp.BN7px7POUV /tmp/tmp.aLRDQIbEVi ++ return 0 + client_pod=pxc-client-c75dc5c46-ws4bl + wait_pod pxc-client-c75dc5c46-ws4bl + local pod=pxc-client-c75dc5c46-ws4bl + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo pxc-client-c75dc5c46-ws4bl ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-c75dc5c46-ws4bl condition met waiting for pod/pxc-client-c75dc5c46-ws4bl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.qG7R1JVFpB/select-1.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.qG7R1JVFpB/select-1.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/select-1.sql /tmp/tmp.qG7R1JVFpB/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RfmKduQuxS +++ mktemp ++ local LAST_ERR=/tmp/tmp.ROGcqcYLQo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RfmKduQuxS ++ cat /tmp/tmp.ROGcqcYLQo ++ rm /tmp/tmp.RfmKduQuxS /tmp/tmp.ROGcqcYLQo ++ return 0 + client_pod=pxc-client-c75dc5c46-ws4bl + wait_pod pxc-client-c75dc5c46-ws4bl + local pod=pxc-client-c75dc5c46-ws4bl + local max_retry=480 + local ns= ++ echo pxc-client-c75dc5c46-ws4bl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-c75dc5c46-ws4bl condition met waiting for pod/pxc-client-c75dc5c46-ws4bl to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
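# compare_mysql_cmd, repeated above for each of the three PXC pods, writes the query result
# into the per-run temp dir (/tmp/tmp.qG7R1JVFpB here) and fails on any of three conditions:
# an empty result file, an "Unknown MySQL server host" error inside it, or a non-empty
# "diff -u" against the expected file. Condensed (paths shortened), the check amounts to:
[[ -s /tmp/tmp.qG7R1JVFpB/select-1.sql ]]                                   # result must not be empty
! grep -q 'Unknown MySQL server host' /tmp/tmp.qG7R1JVFpB/select-1.sql      # no host-resolution error
diff -u e2e-tests/proxysql-scheduler/compare/select-1.sql /tmp/tmp.qG7R1JVFpB/select-1.sql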
-s /tmp/tmp.qG7R1JVFpB/select-1.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.qG7R1JVFpB/select-1.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/select-1.sql /tmp/tmp.qG7R1JVFpB/select-1.sql + is_keyring_plugin_in_use some-name + local cluster=some-name + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' + grep -E -o 'early-plugin-load=keyring_\w+.so' Unable to use a TTY - input is not a terminal or the right kind of file + return 1 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RZebJpE5Zz +++ mktemp ++ local LAST_ERR=/tmp/tmp.DfZ1mGA5HM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RZebJpE5Zz ++ cat /tmp/tmp.DfZ1mGA5HM ++ rm /tmp/tmp.RZebJpE5Zz /tmp/tmp.DfZ1mGA5HM ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.a3JDZv8OrX +++ mktemp ++ local LAST_ERR=/tmp/tmp.BxHramw0Eb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.a3JDZv8OrX ++ cat /tmp/tmp.BxHramw0Eb ++ rm /tmp/tmp.a3JDZv8OrX /tmp/tmp.BxHramw0Eb ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.l1jDaQCEaV ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.rodc1QWpmV +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.l1jDaQCEaV +++++ cat /tmp/tmp.rodc1QWpmV +++++ rm /tmp/tmp.l1jDaQCEaV /tmp/tmp.rodc1QWpmV +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.5uFU3X4oqj ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.pnDdbx6tA4 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.5uFU3X4oqj +++++ cat /tmp/tmp.pnDdbx6tA4 +++++ rm /tmp/tmp.5uFU3X4oqj /tmp/tmp.pnDdbx6tA4 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jLwvm09Dfu 
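# Above, is_keyring_plugin_in_use greps node.cnf for an "early-plugin-load=keyring_*.so"
# entry; the "Unable to use a TTY" warning is only kubectl exec -it running without a
# terminal under Jenkins, and the "return 1" means no keyring plugin is configured.
# wait_cluster_consistency then polls the CR status until the cluster reports ready; a
# condensed sketch of that loop (the real helper also resolves the proxy engine via
# get_proxy_engine before deciding whether to check proxysql or haproxy readiness):
wait_cluster_consistency() {
	local cluster=$1 cluster_size=$2 proxy_size=$3
	local i=0 max=300
	until [ "$(kubectl get pxc "$cluster" -o 'jsonpath={.status.state}')" == "ready" ] &&
		[ "$(kubectl get pxc "$cluster" -o 'jsonpath={.status.pxc.ready}')" == "$cluster_size" ] &&
		[ "$(kubectl get pxc "$cluster" -o 'jsonpath={.status.proxysql.ready}')" == "$proxy_size" ]; do
		[ "$i" -ge "$max" ] && return 1
		sleep 7
		i=$((i + 1))
	done
}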
+++ mktemp ++ local LAST_ERR=/tmp/tmp.5LKnAYu8MW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jLwvm09Dfu ++ cat /tmp/tmp.5LKnAYu8MW ++ rm /tmp/tmp.jLwvm09Dfu /tmp/tmp.5LKnAYu8MW ++ return 0 + [[ 2 == \2 ]] + echo + desc 'check proxysql custom config' + set +o xtrace ----------------------------------------------------------------------------------- check proxysql custom config ----------------------------------------------------------------------------------- + compare_proxysql_cfg some-name-proxysql-0 proxysql-cfg + local pod=some-name-proxysql-0 + local compare_file=proxysql-cfg + local 'query=SELECT variable_value from global_variables WHERE variable_name='\''mysql-poll_timeout'\''' + compare_mysql_cmd_local proxysql-cfg 'SELECT variable_value from global_variables WHERE variable_name='\''mysql-poll_timeout'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + local command_id=proxysql-cfg + local 'command=SELECT variable_value from global_variables WHERE variable_name='\''mysql-poll_timeout'\''' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/proxysql-cfg.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/proxysql-cfg-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT variable_value from global_variables WHERE variable_name='\''mysql-poll_timeout'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 '' + local 'command=SELECT variable_value from global_variables WHERE variable_name='\''mysql-poll_timeout'\''' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local container_name= + set +o xtrace + '[' '!' 
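# compare_proxysql_cfg queries the ProxySQL admin interface (port 6032, user proxyadmin)
# inside the proxysql pod and diffs the answer against compare/proxysql-cfg.sql. The actual
# exec is again hidden by "set +o xtrace"; a hypothetical sketch of what run_mysql_local
# does with the arguments shown above (the exact invocation is an assumption):
kubectl exec some-name-proxysql-0 -- bash -c \
	"printf '%s\n' \"SELECT variable_value from global_variables WHERE variable_name='mysql-poll_timeout'\" | mysql -sN -h127.0.0.1 -P6032 -uproxyadmin -padmin_password"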
-s /tmp/tmp.qG7R1JVFpB/proxysql-cfg.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/proxysql-cfg.sql /tmp/tmp.qG7R1JVFpB/proxysql-cfg.sql + desc 'check if service and statefulset created with expected config' + set +o xtrace ----------------------------------------------------------------------------------- check if service and statefulset created with expected config ----------------------------------------------------------------------------------- + compare_kubectl statefulset/some-name-pxc + local resource=statefulset/some-name-pxc + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc.yml + local new_result=/tmp/tmp.qG7R1JVFpB/statefulset_some-name-pxc.yml + desc 'compare statefulset/some-name-pxc-' + set +o xtrace ----------------------------------------------------------------------------------- compare statefulset/some-name-pxc- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 ++ echo '1.31 >= 1.33' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-k129.yml ']' + version_gt 1.27 ++ echo '1.31 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-k127.yml ']' + version_gt 1.24 ++ echo '1.31 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-k124.yml ']' + version_gt 1.22 ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-k122.yml ']' + version_gt 1.21 ++ echo '1.31 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-oc.yml ']' + version_gt 1.29 ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-aks.yml ']' + kubectl_bin get -o yaml statefulset/some-name-pxc ++ mktemp + local LAST_OUT=/tmp/tmp.T3z1h7KmTs + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. 
| select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.spec.template.spec.containers[].env[] | select(.name == "XTRABACKUP_ENABLED")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.metadata.annotations."kubernetes.digitalocean.com/load-balancer-id") | del(.metadata.annotations."service.beta.kubernetes.io/do-loadbalancer-type") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. 
| select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("proxysql-scheduler-7615", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - ++ mktemp + local LAST_ERR=/tmp/tmp.Jz3DDRukts + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml statefulset/some-name-pxc + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.T3z1h7KmTs + cat /tmp/tmp.Jz3DDRukts + rm /tmp/tmp.T3z1h7KmTs /tmp/tmp.Jz3DDRukts + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc.yml /tmp/tmp.qG7R1JVFpB/statefulset_some-name-pxc.yml + log 'compare_kubectl: statefulset/some-name-pxc OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-29T14:59:07+0000]' compare_kubectl: statefulset/some-name-pxc OK [2025-12-29T14:59:07+0000] compare_kubectl: statefulset/some-name-pxc OK + compare_kubectl statefulset/some-name-proxysql + local resource=statefulset/some-name-proxysql + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql.yml + local new_result=/tmp/tmp.qG7R1JVFpB/statefulset_some-name-proxysql.yml + desc 'compare statefulset/some-name-proxysql-' + set +o xtrace ----------------------------------------------------------------------------------- compare statefulset/some-name-proxysql- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 ++ bc -l ++ echo '1.31 >= 1.33' + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 ++ bc -l ++ echo '1.31 >= 1.29' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-k129.yml ']' + version_gt 1.27 ++ echo '1.31 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-k127.yml ']' + version_gt 1.24 ++ bc -l ++ echo '1.31 >= 1.24' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-k124.yml ']' + version_gt 1.22 ++ bc -l ++ echo '1.31 >= 1.22' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-k122.yml ']' + version_gt 1.21 ++ echo '1.31 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-k121.yml ']' + '[' '!' 
-z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-oc.yml ']' + version_gt 1.29 ++ bc -l ++ echo '1.31 >= 1.29' + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-aks.yml ']' + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.spec.template.spec.containers[].env[] | select(.name == "XTRABACKUP_ENABLED")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.metadata.annotations."kubernetes.digitalocean.com/load-balancer-id") | del(.metadata.annotations."service.beta.kubernetes.io/do-loadbalancer-type") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. 
| select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("proxysql-scheduler-7615", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + kubectl_bin get -o yaml statefulset/some-name-proxysql ++ mktemp + local LAST_OUT=/tmp/tmp.zMgdfCzmcQ ++ mktemp + local LAST_ERR=/tmp/tmp.lF8jUraqJa + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml statefulset/some-name-proxysql + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.zMgdfCzmcQ + cat /tmp/tmp.lF8jUraqJa + rm /tmp/tmp.zMgdfCzmcQ /tmp/tmp.lF8jUraqJa + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql.yml /tmp/tmp.qG7R1JVFpB/statefulset_some-name-proxysql.yml + log 'compare_kubectl: statefulset/some-name-proxysql OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-29T14:59:08+0000]' compare_kubectl: statefulset/some-name-proxysql OK [2025-12-29T14:59:08+0000] compare_kubectl: statefulset/some-name-proxysql OK + compare_kubectl service/some-name-pxc + local resource=service/some-name-pxc + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc.yml + local new_result=/tmp/tmp.qG7R1JVFpB/service_some-name-pxc.yml + desc 'compare service/some-name-pxc-' + set +o xtrace ----------------------------------------------------------------------------------- compare service/some-name-pxc- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 ++ echo '1.31 >= 1.33' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 ++ bc -l ++ echo '1.31 >= 1.29' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-k129.yml ']' + version_gt 1.27 ++ bc -l ++ echo '1.31 >= 1.27' + '[' 1 -eq 1 ']' + return 0 + '[' -f 
/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-k127.yml ']' + version_gt 1.24 ++ bc -l ++ echo '1.31 >= 1.24' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-k124.yml ']' + version_gt 1.22 ++ bc -l ++ echo '1.31 >= 1.22' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-k122.yml ']' + version_gt 1.21 ++ echo '1.31 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-oc.yml ']' + version_gt 1.29 ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-aks.yml ']' + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.spec.template.spec.containers[].env[] | select(.name == "XTRABACKUP_ENABLED")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.metadata.annotations."kubernetes.digitalocean.com/load-balancer-id") | del(.metadata.annotations."service.beta.kubernetes.io/do-loadbalancer-type") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. 
| select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("proxysql-scheduler-7615", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. 
| select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + kubectl_bin get -o yaml service/some-name-pxc ++ mktemp + local LAST_OUT=/tmp/tmp.KmdPKdeSaa ++ mktemp + local LAST_ERR=/tmp/tmp.xiN2rL4eKh + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml service/some-name-pxc + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.KmdPKdeSaa + cat /tmp/tmp.xiN2rL4eKh + rm /tmp/tmp.KmdPKdeSaa /tmp/tmp.xiN2rL4eKh + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc.yml /tmp/tmp.qG7R1JVFpB/service_some-name-pxc.yml + log 'compare_kubectl: service/some-name-pxc OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-29T14:59:10+0000]' compare_kubectl: service/some-name-pxc OK [2025-12-29T14:59:10+0000] compare_kubectl: service/some-name-pxc OK + compare_kubectl service/some-name-proxysql + local resource=service/some-name-proxysql + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql.yml + local new_result=/tmp/tmp.qG7R1JVFpB/service_some-name-proxysql.yml + desc 'compare service/some-name-proxysql-' + set +o xtrace ----------------------------------------------------------------------------------- compare service/some-name-proxysql- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 ++ bc -l ++ echo '1.31 >= 1.33' + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 ++ bc -l ++ echo '1.31 >= 1.29' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-k129.yml ']' + version_gt 1.27 ++ echo '1.31 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-k127.yml ']' + version_gt 1.24 ++ echo '1.31 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-k124.yml ']' + version_gt 1.22 ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-k122.yml ']' + version_gt 1.21 ++ echo '1.31 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-oc.yml ']' + version_gt 1.29 ++ bc -l ++ echo '1.31 >= 1.29' + '[' 1 -eq 1 ']' + return 0 + '[' '!' 
-z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-aks.yml ']' + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.spec.template.spec.containers[].env[] | select(.name == "XTRABACKUP_ENABLED")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.metadata.annotations."kubernetes.digitalocean.com/load-balancer-id") | del(.metadata.annotations."service.beta.kubernetes.io/do-loadbalancer-type") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. 
== "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("proxysql-scheduler-7615", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + kubectl_bin get -o yaml service/some-name-proxysql ++ mktemp + local LAST_OUT=/tmp/tmp.k8foph4neU ++ mktemp + local LAST_ERR=/tmp/tmp.u1pbggJ3tG + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml service/some-name-proxysql + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.k8foph4neU + cat /tmp/tmp.u1pbggJ3tG + rm /tmp/tmp.k8foph4neU /tmp/tmp.u1pbggJ3tG + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql.yml /tmp/tmp.qG7R1JVFpB/service_some-name-proxysql.yml + log 'compare_kubectl: service/some-name-proxysql OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-29T14:59:11+0000]' compare_kubectl: service/some-name-proxysql OK [2025-12-29T14:59:11+0000] compare_kubectl: service/some-name-proxysql OK + compare_kubectl service/some-name-proxysql-unready + local resource=service/some-name-proxysql-unready + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready.yml + local new_result=/tmp/tmp.qG7R1JVFpB/service_some-name-proxysql-unready.yml + desc 'compare service/some-name-proxysql-unready-' + set +o xtrace ----------------------------------------------------------------------------------- compare service/some-name-proxysql-unready- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 ++ bc -l ++ echo '1.31 >= 1.33' + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 ++ bc -l ++ echo '1.31 >= 1.29' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-k129.yml ']' + version_gt 1.27 ++ echo '1.31 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-k127.yml ']' + version_gt 1.24 ++ bc -l ++ echo '1.31 >= 1.24' + '[' 1 -eq 1 ']' 
+ return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-k124.yml ']' + version_gt 1.22 ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-k122.yml ']' + version_gt 1.21 ++ echo '1.31 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-oc.yml ']' + version_gt 1.29 ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-aks.yml ']' + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.spec.template.spec.containers[].env[] | select(.name == "XTRABACKUP_ENABLED")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.metadata.annotations."kubernetes.digitalocean.com/load-balancer-id") | del(.metadata.annotations."service.beta.kubernetes.io/do-loadbalancer-type") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. 
| select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("proxysql-scheduler-7615", "namespace") | (..+ kubectl_bin get -o yaml service/some-name-proxysql-unready | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. 
| select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - ++ mktemp + local LAST_OUT=/tmp/tmp.Etp1OJegP6 ++ mktemp + local LAST_ERR=/tmp/tmp.O81pZ2SrS8 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml service/some-name-proxysql-unready + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Etp1OJegP6 + cat /tmp/tmp.O81pZ2SrS8 + rm /tmp/tmp.Etp1OJegP6 /tmp/tmp.O81pZ2SrS8 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready.yml /tmp/tmp.qG7R1JVFpB/service_some-name-proxysql-unready.yml + log 'compare_kubectl: service/some-name-proxysql-unready OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-29T14:59:13+0000]' compare_kubectl: service/some-name-proxysql-unready OK [2025-12-29T14:59:13+0000] compare_kubectl: service/some-name-proxysql-unready OK + sleep 120 + desc 'check if scheduler is enabled in all ProxySQL servers' + set +o xtrace ----------------------------------------------------------------------------------- check if scheduler is enabled in all ProxySQL servers ----------------------------------------------------------------------------------- + compare_scheduler some-name-proxysql-0 scheduler-0 + local pod=some-name-proxysql-0 + local compare_file=scheduler-0 + compare_mysql_cmd_local scheduler-0 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + local command_id=scheduler-0 + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/scheduler-0.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/scheduler-0-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 '' + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local container_name= + set +o xtrace + '[' '!' 
-s /tmp/tmp.qG7R1JVFpB/scheduler-0.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/scheduler-0.sql /tmp/tmp.qG7R1JVFpB/scheduler-0.sql + log 'scheduler is enabled in some-name-proxysql-0: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-29T15:01:14+0000]' scheduler is enabled in some-name-proxysql-0: OK [2025-12-29T15:01:14+0000] scheduler is enabled in some-name-proxysql-0: OK + compare_scheduler some-name-proxysql-1 scheduler-1 + local pod=some-name-proxysql-1 + local compare_file=scheduler-1 + compare_mysql_cmd_local scheduler-1 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + local command_id=scheduler-1 + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/scheduler-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/scheduler-1-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 '' + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local container_name= + set +o xtrace + '[' '!' -s /tmp/tmp.qG7R1JVFpB/scheduler-1.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/scheduler-1.sql /tmp/tmp.qG7R1JVFpB/scheduler-1.sql + log 'scheduler is enabled in some-name-proxysql-1: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-29T15:01:16+0000]' scheduler is enabled in some-name-proxysql-1: OK [2025-12-29T15:01:16+0000] scheduler is enabled in some-name-proxysql-1: OK + desc 'check if scheduler is doing its job in all ProxySQL servers' + set +o xtrace ----------------------------------------------------------------------------------- check if scheduler is doing its job in all ProxySQL servers ----------------------------------------------------------------------------------- + compare_mysql_servers some-name-proxysql-0 mysql-servers-0 + local pod=some-name-proxysql-0 + local compare_file=mysql-servers-0 + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-0' check runtime_mysql_servers in some-name-proxysql-0+ compare_mysql_cmd_local mysql-servers-0 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + echo + log 'mysql_servers are configured in some-name-proxysql-0: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-29T15:01:18+0000]' mysql_servers are configured in some-name-proxysql-0: OK [2025-12-29T15:01:18+0000] mysql_servers are configured in some-name-proxysql-0: OK + compare_mysql_servers some-name-proxysql-1 mysql-servers-1 + local pod=some-name-proxysql-1 + local compare_file=mysql-servers-1 + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 
16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-1' check runtime_mysql_servers in some-name-proxysql-1+ compare_mysql_cmd_local mysql-servers-1 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + echo + log 'mysql_servers are configured in some-name-proxysql-1: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-29T15:01:19+0000]' mysql_servers are configured in some-name-proxysql-1: OK [2025-12-29T15:01:19+0000] mysql_servers are configured in some-name-proxysql-1: OK + desc 'check disabling scheduler' + set +o xtrace ----------------------------------------------------------------------------------- check disabling scheduler ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge -p '{"spec": {"proxysql": {"scheduler": {"enabled": false}}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.cnDUItClxY ++ mktemp + local LAST_ERR=/tmp/tmp.0NFZp8hcZb + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge -p '{"spec": {"proxysql": {"scheduler": {"enabled": false}}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.cnDUItClxY perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.0NFZp8hcZb + rm /tmp/tmp.cnDUItClxY /tmp/tmp.0NFZp8hcZb + return 0 + sleep_with_log 90 + local d=90 + log 'sleeping for 90 seconds' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-29T15:01:21+0000]' sleeping for 90 seconds [2025-12-29T15:01:21+0000] sleeping for 90 seconds + sleep 90 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3Pw1ubUliS +++ mktemp ++ local LAST_ERR=/tmp/tmp.3XHF2E3xoT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3Pw1ubUliS ++ cat /tmp/tmp.3XHF2E3xoT ++ rm /tmp/tmp.3Pw1ubUliS /tmp/tmp.3XHF2E3xoT ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DNw3dy0Des +++ mktemp ++ local LAST_ERR=/tmp/tmp.BDf5PSHjoH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DNw3dy0Des ++ cat /tmp/tmp.BDf5PSHjoH ++ rm /tmp/tmp.DNw3dy0Des /tmp/tmp.BDf5PSHjoH ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Upq0kPDa04 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.3yy2H4oARN +++++ local 
exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Upq0kPDa04 +++++ cat /tmp/tmp.3yy2H4oARN +++++ rm /tmp/tmp.Upq0kPDa04 /tmp/tmp.3yy2H4oARN +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.sPm3iVh2XQ ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.yt5MOeyWfp +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.sPm3iVh2XQ +++++ cat /tmp/tmp.yt5MOeyWfp +++++ rm /tmp/tmp.sPm3iVh2XQ /tmp/tmp.yt5MOeyWfp +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WjEKuBtE9H +++ mktemp ++ local LAST_ERR=/tmp/tmp.JWzT6bm95P ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WjEKuBtE9H ++ cat /tmp/tmp.JWzT6bm95P ++ rm /tmp/tmp.WjEKuBtE9H /tmp/tmp.JWzT6bm95P ++ return 0 + [[ 2 == \2 ]] + echo + compare_scheduler some-name-proxysql-0 scheduler-0-disabled + local pod=some-name-proxysql-0 + local compare_file=scheduler-0-disabled + compare_mysql_cmd_local scheduler-0-disabled 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + local command_id=scheduler-0-disabled + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/scheduler-0-disabled.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/scheduler-0-disabled-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 '' + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local container_name= + set +o xtrace + '[' '!' 
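wait_cluster_consistency, traced at length here and after every later change, just polls the custom resource status until the operator reports the expected shape. A condensed sketch of that loop (the real helper also prints progress dots and gives up after 300 iterations):

    wait_ready() {
        local cluster=$1 pxc_size=$2 proxy_size=$3
        # keep polling until the CR is ready and both ready counters match the requested sizes
        until [ "$(kubectl get pxc "$cluster" -o jsonpath='{.status.state}')" = "ready" ] \
           && [ "$(kubectl get pxc "$cluster" -o jsonpath='{.status.pxc.ready}')" = "$pxc_size" ] \
           && [ "$(kubectl get pxc "$cluster" -o jsonpath='{.status.proxysql.ready}')" = "$proxy_size" ]; do
            echo -n .
            sleep 5
        done
    }
    # wait_ready some-name 3 2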
-s /tmp/tmp.qG7R1JVFpB/scheduler-0-disabled.sql ']' + sleep 20 + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local container_name= + set +o xtrace + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/scheduler-0-disabled.sql /tmp/tmp.qG7R1JVFpB/scheduler-0-disabled.sql + log 'scheduler is disabled in some-name-proxysql-0: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-29T15:03:28+0000]' scheduler is disabled in some-name-proxysql-0: OK [2025-12-29T15:03:28+0000] scheduler is disabled in some-name-proxysql-0: OK + compare_scheduler some-name-proxysql-1 scheduler-1-disabled + local pod=some-name-proxysql-1 + local compare_file=scheduler-1-disabled + compare_mysql_cmd_local scheduler-1-disabled 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + local command_id=scheduler-1-disabled + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/scheduler-1-disabled.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/scheduler-1-disabled-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 '' + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local container_name= + set +o xtrace + '[' '!' 
-s /tmp/tmp.qG7R1JVFpB/scheduler-1-disabled.sql ']' + sleep 20 + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local container_name= + set +o xtrace + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/scheduler-1-disabled.sql /tmp/tmp.qG7R1JVFpB/scheduler-1-disabled.sql + log 'scheduler is disabled in some-name-proxysql-1: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-29T15:03:52+0000]' scheduler is disabled in some-name-proxysql-1: OK [2025-12-29T15:03:52+0000] scheduler is disabled in some-name-proxysql-1: OK + kubectl get pod NAME READY STATUS RESTARTS AGE pxc-client-c75dc5c46-ws4bl 2/2 Running 0 10m some-name-proxysql-0 3/3 Running 0 2m6s some-name-proxysql-1 3/3 Running 0 2m31s some-name-pxc-0 1/1 Running 0 10m some-name-pxc-1 1/1 Running 0 9m4s some-name-pxc-2 1/1 Running 0 7m51s + kubectl get pxc NAME ENDPOINT STATUS PXC PROXYSQL HAPROXY AGE some-name some-name-proxysql.proxysql-scheduler-7615 ready 3 2 10m + desc 'check enabling scheduler' + set +o xtrace ----------------------------------------------------------------------------------- check enabling scheduler ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge -p '{"spec": {"proxysql": {"scheduler": {"enabled": true}}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.395nNe34VZ ++ mktemp + local LAST_ERR=/tmp/tmp.wK1tblkmi9 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge -p '{"spec": {"proxysql": {"scheduler": {"enabled": true}}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.395nNe34VZ perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.wK1tblkmi9 + rm /tmp/tmp.395nNe34VZ /tmp/tmp.wK1tblkmi9 + return 0 + sleep_with_log 90 + local d=90 + log 'sleeping for 90 seconds' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-29T15:03:56+0000]' sleeping for 90 seconds [2025-12-29T15:03:56+0000] sleeping for 90 seconds + sleep 90 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RlfTp52JGZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.cMjLiCXq4m ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RlfTp52JGZ ++ cat /tmp/tmp.cMjLiCXq4m ++ rm /tmp/tmp.RlfTp52JGZ /tmp/tmp.cMjLiCXq4m ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zPYzhSwlLp +++ mktemp ++ local LAST_ERR=/tmp/tmp.PatF0OFqUw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ 
exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zPYzhSwlLp ++ cat /tmp/tmp.PatF0OFqUw ++ rm /tmp/tmp.zPYzhSwlLp /tmp/tmp.PatF0OFqUw ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.mddUbmn5aI ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.RbS43B6D5a +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.mddUbmn5aI +++++ cat /tmp/tmp.RbS43B6D5a +++++ rm /tmp/tmp.mddUbmn5aI /tmp/tmp.RbS43B6D5a +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Gx5BeluTEe ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.2NdrT4xczB +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Gx5BeluTEe +++++ cat /tmp/tmp.2NdrT4xczB +++++ rm /tmp/tmp.Gx5BeluTEe /tmp/tmp.2NdrT4xczB +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.puYJFCMhQI +++ mktemp ++ local LAST_ERR=/tmp/tmp.1pJ2uxcyVk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.puYJFCMhQI ++ cat /tmp/tmp.1pJ2uxcyVk ++ rm /tmp/tmp.puYJFCMhQI /tmp/tmp.1pJ2uxcyVk ++ return 0 + [[ 2 == \2 ]] + echo + compare_scheduler some-name-proxysql-0 scheduler-0 + local pod=some-name-proxysql-0 + local compare_file=scheduler-0 + compare_mysql_cmd_local scheduler-0 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + local command_id=scheduler-0 + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/scheduler-0.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/scheduler-0-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 '' + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local container_name= + set +o xtrace + '[' '!' 
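The disable/enable round trip above is driven entirely by two merge patches against spec.proxysql.scheduler.enabled, with a 90-second settle and a consistency wait between each patch and the runtime_scheduler re-check:

    # the toggle exercised in this test, copied from the trace
    kubectl patch pxc some-name --type=merge -p '{"spec": {"proxysql": {"scheduler": {"enabled": false}}}}'
    # ... wait, then expect runtime_scheduler to be empty on every ProxySQL pod ...
    kubectl patch pxc some-name --type=merge -p '{"spec": {"proxysql": {"scheduler": {"enabled": true}}}}'
    # ... wait, then expect the scheduler rows to match the scheduler-N.sql golden files again ...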
-s /tmp/tmp.qG7R1JVFpB/scheduler-0.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/scheduler-0.sql /tmp/tmp.qG7R1JVFpB/scheduler-0.sql + log 'scheduler is enabled in some-name-proxysql-0: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-29T15:05:40+0000]' scheduler is enabled in some-name-proxysql-0: OK [2025-12-29T15:05:40+0000] scheduler is enabled in some-name-proxysql-0: OK + compare_scheduler some-name-proxysql-1 scheduler-1 + local pod=some-name-proxysql-1 + local compare_file=scheduler-1 + compare_mysql_cmd_local scheduler-1 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + local command_id=scheduler-1 + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/scheduler-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/scheduler-1-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 '' + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local container_name= + set +o xtrace + '[' '!' -s /tmp/tmp.qG7R1JVFpB/scheduler-1.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/scheduler-1.sql /tmp/tmp.qG7R1JVFpB/scheduler-1.sql + log 'scheduler is enabled in some-name-proxysql-1: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-29T15:05:42+0000]' scheduler is enabled in some-name-proxysql-1: OK [2025-12-29T15:05:42+0000] scheduler is enabled in some-name-proxysql-1: OK + desc 'check PXC pod 1 is promoted to writer when pod-0 is down' + set +o xtrace ----------------------------------------------------------------------------------- check PXC pod 1 is promoted to writer when pod-0 is down ----------------------------------------------------------------------------------- + LOOP_PID=24138 + echo LOOP_PID=24138 LOOP_PID=24138 + sleep 10 + for i in '{1..20}' + kubectl delete pod some-name-pxc-0 pod "some-name-pxc-0" deleted from proxysql-scheduler-7615 namespace + log 'waiting for pod0 to be removed from proxysql' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-29T15:05:52+0000]' waiting for pod0 to be removed from proxysql [2025-12-29T15:05:52+0000] waiting for pod0 to be removed from proxysql + compare_mysql_servers some-name-proxysql-0 mysql-servers-0-pod0-down 24138 + local pod=some-name-proxysql-0 + local compare_file=mysql-servers-0-pod0-down + local loop_pid=24138 + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-0' check runtime_mysql_servers in some-name-proxysql-0+ compare_mysql_cmd_local mysql-servers-0-pod0-down 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + echo + log 'mysql_servers 
are configured in some-name-proxysql-0 when pxc-0 is down: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-29T15:05:54+0000]' mysql_servers are configured in some-name-proxysql-0 when pxc-0 is down: OK [2025-12-29T15:05:54+0000] mysql_servers are configured in some-name-proxysql-0 when pxc-0 is down: OK + compare_mysql_servers some-name-proxysql-1 mysql-servers-1-pod0-down 24138 + local pod=some-name-proxysql-1 + local compare_file=mysql-servers-1-pod0-down + local loop_pid=24138 + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-1' check runtime_mysql_servers in some-name-proxysql-1+ compare_mysql_cmd_local mysql-servers-1-pod0-down 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + echo + log 'mysql_servers are configured in some-name-proxysql-1 when pxc-0 is down: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-29T15:05:56+0000]' mysql_servers are configured in some-name-proxysql-1 when pxc-0 is down: OK [2025-12-29T15:05:56+0000] mysql_servers are configured in some-name-proxysql-1 when pxc-0 is down: OK + kill 24138 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/../functions: line 152: 24138 Terminated ( for i in {1..20}; do kubectl delete pod "${cluster}-pxc-0"; sleep 3; done ) + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aFInUW8GcI +++ mktemp ++ local LAST_ERR=/tmp/tmp.DvM1hDGkYT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aFInUW8GcI ++ cat /tmp/tmp.DvM1hDGkYT ++ rm /tmp/tmp.aFInUW8GcI /tmp/tmp.DvM1hDGkYT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mLkI180ZJb +++ mktemp ++ local LAST_ERR=/tmp/tmp.ogvHyT4LXM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mLkI180ZJb ++ cat /tmp/tmp.ogvHyT4LXM ++ rm /tmp/tmp.mLkI180ZJb /tmp/tmp.ogvHyT4LXM ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
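The writer-promotion check keeps some-name-pxc-0 down by deleting it in a loop from a background subshell (visible in the Terminated message above) while runtime_mysql_servers is queried on each ProxySQL pod; once both pods show the expected ONLINE set with another node promoted to writer, the loop is killed and the cluster is left to recover. Reconstructed as a sketch:

    # keep pxc-0 down while the checks run; LOOP_PID mirrors the value logged above
    ( for i in {1..20}; do kubectl delete pod some-name-pxc-0; sleep 3; done ) &
    LOOP_PID=$!
    sleep 10
    # on each proxysql pod, query the admin port (see the check_scheduler-style exec earlier) for:
    #   SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='ONLINE'
    kill "$LOOP_PID"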
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ybu3oInarB +++ mktemp ++ local LAST_ERR=/tmp/tmp.Be2IyEl6ec ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ybu3oInarB ++ cat /tmp/tmp.Be2IyEl6ec ++ rm /tmp/tmp.ybu3oInarB /tmp/tmp.Be2IyEl6ec ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mDIAjNDp5y +++ mktemp ++ local LAST_ERR=/tmp/tmp.QdDWyBcWft ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mDIAjNDp5y ++ cat /tmp/tmp.QdDWyBcWft ++ rm /tmp/tmp.mDIAjNDp5y /tmp/tmp.QdDWyBcWft ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tPewjJs80b +++ mktemp ++ local LAST_ERR=/tmp/tmp.qrhUif2RyN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tPewjJs80b ++ cat /tmp/tmp.qrhUif2RyN ++ rm /tmp/tmp.tPewjJs80b /tmp/tmp.qrhUif2RyN ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BMqRLYEz3m +++ mktemp ++ local LAST_ERR=/tmp/tmp.qqZUviLqaZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BMqRLYEz3m ++ cat /tmp/tmp.qqZUviLqaZ ++ rm /tmp/tmp.BMqRLYEz3m /tmp/tmp.qqZUviLqaZ ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TQp1ESJ4XW +++ mktemp ++ local LAST_ERR=/tmp/tmp.bdGvnH1CZn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TQp1ESJ4XW ++ cat /tmp/tmp.bdGvnH1CZn ++ rm /tmp/tmp.TQp1ESJ4XW /tmp/tmp.bdGvnH1CZn ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.VKaHqjiudN ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ImYMUjziO6 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.VKaHqjiudN +++++ cat /tmp/tmp.ImYMUjziO6 +++++ rm /tmp/tmp.VKaHqjiudN /tmp/tmp.ImYMUjziO6 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.hEeZo9Y2LT ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.A9ZNGjbhkd +++++ local exit_status=0 ++++++ seq 0 2 +++++ 
for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.hEeZo9Y2LT +++++ cat /tmp/tmp.A9ZNGjbhkd +++++ rm /tmp/tmp.hEeZo9Y2LT /tmp/tmp.A9ZNGjbhkd +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CNe2ZeOhj0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.qzua6iLIAG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CNe2ZeOhj0 ++ cat /tmp/tmp.qzua6iLIAG ++ rm /tmp/tmp.CNe2ZeOhj0 /tmp/tmp.qzua6iLIAG ++ return 0 + [[ 2 == \2 ]] + echo + desc 'check scaling PXC up to 5 replicas' + set +o xtrace ----------------------------------------------------------------------------------- check scaling PXC up to 5 replicas ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge -p '{"spec": {"pxc": {"size": 5}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.xjSkWKvLKv ++ mktemp + local LAST_ERR=/tmp/tmp.HqPL5BHp4G + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge -p '{"spec": {"pxc": {"size": 5}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.xjSkWKvLKv perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.HqPL5BHp4G + rm /tmp/tmp.xjSkWKvLKv /tmp/tmp.HqPL5BHp4G + return 0 + wait_pod some-name-pxc-3 + local pod=some-name-pxc-3 + local max_retry=480 + local ns= ++ echo some-name-pxc-3 ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/some-name-pxc-3 condition met waiting for pod/some-name-pxc-3 to become Ready.Ok + wait_pod some-name-pxc-4 + local pod=some-name-pxc-4 + local max_retry=480 + local ns= ++ echo some-name-pxc-4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-4 condition met waiting for pod/some-name-pxc-4 to become Ready.Ok + sleep 120 + compare_mysql_servers some-name-proxysql-0 mysql-servers-0-1 + local pod=some-name-proxysql-0 + local compare_file=mysql-servers-0-1 + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-0' check runtime_mysql_servers in some-name-proxysql-0+ compare_mysql_cmd_local mysql-servers-0-1 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + echo + log 'new mysql_servers are configured in some-name-proxysql-0: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-29T15:11:17+0000]' new mysql_servers are configured in some-name-proxysql-0: OK [2025-12-29T15:11:17+0000] new mysql_servers are configured in some-name-proxysql-0: OK + compare_mysql_servers some-name-proxysql-1 mysql-servers-1-1 + local pod=some-name-proxysql-1 + local compare_file=mysql-servers-1-1 + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM 
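
The scale-up just traced is a single merge patch on the custom resource followed by waiting for the two new pods; the same patch form is used again later in the log to scale back down to 3. A condensed sketch (kubectl wait stands in for the test's wait_pod helper; the timeout value is illustrative):

# Scale the PXC member count from 3 to 5 via the custom resource (sketch).
kubectl patch pxc some-name --type=merge -p '{"spec": {"pxc": {"size": 5}}}'

# Wait for the two new pods to become Ready (wait_pod polls instead; 480s is illustrative).
kubectl wait --for=condition=Ready pod/some-name-pxc-3 pod/some-name-pxc-4 --timeout=480s
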
runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-1' check runtime_mysql_servers in some-name-proxysql-1+ compare_mysql_cmd_local mysql-servers-1-1 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + echo + log 'new mysql_servers are configured in some-name-proxysql-1: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-29T15:11:19+0000]' new mysql_servers are configured in some-name-proxysql-1: OK [2025-12-29T15:11:19+0000] new mysql_servers are configured in some-name-proxysql-1: OK + desc 'check scaling ProxySQL up' + set +o xtrace ----------------------------------------------------------------------------------- check scaling ProxySQL up ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge -p '{"spec": {"proxysql": {"size": 3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.trfUkib55F ++ mktemp + local LAST_ERR=/tmp/tmp.I4LCq41rpS + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge -p '{"spec": {"proxysql": {"size": 3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.trfUkib55F perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.I4LCq41rpS + rm /tmp/tmp.trfUkib55F /tmp/tmp.I4LCq41rpS + return 0 + wait_pod some-name-proxysql-2 + local pod=some-name-proxysql-2 + local max_retry=480 + local ns= ++ echo some-name-proxysql-2 ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=proxysql + set +o xtrace pod/some-name-proxysql-2 condition met waiting for pod/some-name-proxysql-2 to become Ready.Ok + sleep 120 + compare_scheduler some-name-proxysql-2 scheduler-2 + local pod=some-name-proxysql-2 + local compare_file=scheduler-2 + compare_mysql_cmd_local scheduler-2 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-2 + local command_id=scheduler-2 + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-2 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/scheduler-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/scheduler-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-2 '' + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-2 + local container_name= + set +o xtrace + '[' '!' 
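
The compare_mysql_servers / compare_scheduler checks query the ProxySQL admin interface on port 6032 inside each proxysql pod, with the proxyadmin credentials shown in the trace. A sketch of those queries, assuming the container is named proxysql and ships a mysql client (the test's run_mysql_local helper may do this differently):

# Inspect ProxySQL's runtime view of backends and of the scheduler (sketch).
POD=some-name-proxysql-2
kubectl exec "$POD" -c proxysql -- \
    mysql -h127.0.0.1 -P6032 -uproxyadmin -padmin_password -e \
    "SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='ONLINE';"
kubectl exec "$POD" -c proxysql -- \
    mysql -h127.0.0.1 -P6032 -uproxyadmin -padmin_password -e \
    "SELECT * FROM runtime_scheduler;"
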
-s /tmp/tmp.qG7R1JVFpB/scheduler-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2330/e2e-tests/proxysql-scheduler/compare/scheduler-2.sql /tmp/tmp.qG7R1JVFpB/scheduler-2.sql + log 'scheduler is enabled in some-name-proxysql-2: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-29T15:13:47+0000]' scheduler is enabled in some-name-proxysql-2: OK [2025-12-29T15:13:47+0000] scheduler is enabled in some-name-proxysql-2: OK + compare_mysql_servers some-name-proxysql-2 mysql-servers-2 + local pod=some-name-proxysql-2 + local compare_file=mysql-servers-2 + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-2' check runtime_mysql_servers in some-name-proxysql-2+ compare_mysql_cmd_local mysql-servers-2 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-2 + echo + log 'mysql_servers are configured in some-name-proxysql-2: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-29T15:13:49+0000]' mysql_servers are configured in some-name-proxysql-2: OK [2025-12-29T15:13:49+0000] mysql_servers are configured in some-name-proxysql-2: OK + desc 'check writerIsAlsoReader = false' + set +o xtrace ----------------------------------------------------------------------------------- check writerIsAlsoReader = false ----------------------------------------------------------------------------------- + kubectl patch pxc some-name --type=merge -p '{"spec": {"proxysql": {"scheduler": {"writerIsAlsoReader": false}}}}' perconaxtradbcluster.pxc.percona.com/some-name patched + sleep 10 + wait_cluster_consistency some-name 5 3 + local cluster_name=some-name + local cluster_size=5 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aHwJVYrs5M +++ mktemp ++ local LAST_ERR=/tmp/tmp.yh4OZqv83W ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aHwJVYrs5M ++ cat /tmp/tmp.yh4OZqv83W ++ rm /tmp/tmp.aHwJVYrs5M /tmp/tmp.yh4OZqv83W ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.55JbtUHEIU +++ mktemp ++ local LAST_ERR=/tmp/tmp.pXtukQFmNV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.55JbtUHEIU ++ cat /tmp/tmp.pXtukQFmNV ++ rm /tmp/tmp.55JbtUHEIU /tmp/tmp.pXtukQFmNV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
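
compare_mysql_cmd_local, whose tail is visible above, diffs the captured query output against a golden file under e2e-tests/proxysql-scheduler/compare/, preferring a version-specific variant (for example scheduler-2-80.sql for 8.0 images) when one exists. A simplified sketch of that selection and diff; the actual-output path is illustrative:

# Pick the golden file, preferring a version-specific variant when present (sketch).
IMAGE_PXC=${IMAGE_PXC:-perconalab/percona-xtradb-cluster-operator:main-pxc8.0}
expected=e2e-tests/proxysql-scheduler/compare/scheduler-2.sql
actual=/tmp/scheduler-2.sql   # captured query output; path illustrative
if [[ "$IMAGE_PXC" =~ 8\.0 && -f "${expected%.sql}-80.sql" ]]; then
    expected="${expected%.sql}-80.sql"
fi
diff -u "$expected" "$actual"   # a non-empty diff fails (or retries) the check
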
.+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7DZRtoV00K +++ mktemp ++ local LAST_ERR=/tmp/tmp.b9x4g6u7OT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7DZRtoV00K ++ cat /tmp/tmp.b9x4g6u7OT ++ rm /tmp/tmp.7DZRtoV00K /tmp/tmp.b9x4g6u7OT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.g0irPvbrMw +++ mktemp ++ local LAST_ERR=/tmp/tmp.P6o9HplcP7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.g0irPvbrMw ++ cat /tmp/tmp.P6o9HplcP7 ++ rm /tmp/tmp.g0irPvbrMw /tmp/tmp.P6o9HplcP7 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zH43W3QxMM +++ mktemp ++ local LAST_ERR=/tmp/tmp.kmdGfGdnLq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zH43W3QxMM ++ cat /tmp/tmp.kmdGfGdnLq ++ rm /tmp/tmp.zH43W3QxMM /tmp/tmp.kmdGfGdnLq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 4 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.n6E7uOrEHe +++ mktemp ++ local LAST_ERR=/tmp/tmp.3KPvLBSSqP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.n6E7uOrEHe ++ cat /tmp/tmp.3KPvLBSSqP ++ rm /tmp/tmp.n6E7uOrEHe /tmp/tmp.3KPvLBSSqP ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.e8F9KtgRAL +++ mktemp ++ local LAST_ERR=/tmp/tmp.vrIVMtLzD7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.e8F9KtgRAL ++ cat /tmp/tmp.vrIVMtLzD7 ++ rm /tmp/tmp.e8F9KtgRAL /tmp/tmp.vrIVMtLzD7 ++ return 0 + [[ 5 == \5 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.O864HjwwTI ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.XxrZ8XnjF2 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.O864HjwwTI +++++ cat /tmp/tmp.XxrZ8XnjF2 +++++ rm /tmp/tmp.O864HjwwTI /tmp/tmp.XxrZ8XnjF2 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Jvlyb3K9sP ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.3oxPf120rr +++++ local exit_status=0 ++++++ seq 0 2 +++++ 
for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Jvlyb3K9sP +++++ cat /tmp/tmp.3oxPf120rr +++++ rm /tmp/tmp.Jvlyb3K9sP /tmp/tmp.3oxPf120rr +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FQQLz5EgX9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Dwr97Yom7p ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FQQLz5EgX9 ++ cat /tmp/tmp.Dwr97Yom7p ++ rm /tmp/tmp.FQQLz5EgX9 /tmp/tmp.Dwr97Yom7p ++ return 0 + [[ 3 == \3 ]] + echo + sleep 60 + compare_mysql_servers some-name-proxysql-0 mysql-servers-0-1-writerNotReader + local pod=some-name-proxysql-0 + local compare_file=mysql-servers-0-1-writerNotReader + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-0' check runtime_mysql_servers in some-name-proxysql-0+ compare_mysql_cmd_local mysql-servers-0-1-writerNotReader 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + echo + log 'new mysql_servers are configured in some-name-proxysql-0: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-29T15:15:44+0000]' new mysql_servers are configured in some-name-proxysql-0: OK [2025-12-29T15:15:44+0000] new mysql_servers are configured in some-name-proxysql-0: OK + compare_mysql_servers some-name-proxysql-1 mysql-servers-1-1-writerNotReader + local pod=some-name-proxysql-1 + local compare_file=mysql-servers-1-1-writerNotReader + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-1' check runtime_mysql_servers in some-name-proxysql-1+ compare_mysql_cmd_local mysql-servers-1-1-writerNotReader 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + echo + log 'new mysql_servers are configured in some-name-proxysql-1: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-29T15:15:46+0000]' new mysql_servers are configured in some-name-proxysql-1: OK [2025-12-29T15:15:46+0000] new mysql_servers are configured in some-name-proxysql-1: OK + compare_mysql_servers some-name-proxysql-2 mysql-servers-2-writerNotReader + local pod=some-name-proxysql-2 + local compare_file=mysql-servers-2-writerNotReader + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-2' check runtime_mysql_servers in some-name-proxysql-2+ compare_mysql_cmd_local mysql-servers-2-writerNotReader 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 
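
The deeply nested '+++++' trace above is get_proxy_engine deciding which proxy the cluster runs: it reads spec.haproxy.enabled and spec.proxysql.enabled from the CR and, since only ProxySQL is enabled here, settles on proxysql and the status.proxysql.ready count. A flattened sketch of that decision:

# Decide which proxy the cluster uses, based on the CR spec (sketch).
cluster=some-name
if [[ "$(kubectl get pxc "$cluster" -o 'jsonpath={.spec.haproxy.enabled}')" == "true" ]]; then
    proxy=haproxy
elif [[ "$(kubectl get pxc "$cluster" -o 'jsonpath={.spec.proxysql.enabled}')" == "true" ]]; then
    proxy=proxysql
fi
echo "proxy engine: $proxy"                                          # "proxysql" for this test
kubectl get pxc "$cluster" -o 'jsonpath={.status.proxysql.ready}'    # 3 expected at this point
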
-uproxyadmin -padmin_password' some-name-proxysql-2 + echo + log 'mysql_servers are configured in some-name-proxysql-2: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-29T15:15:48+0000]' mysql_servers are configured in some-name-proxysql-2: OK [2025-12-29T15:15:48+0000] mysql_servers are configured in some-name-proxysql-2: OK + desc 'check scaling PXC down to 3 replicas' + set +o xtrace ----------------------------------------------------------------------------------- check scaling PXC down to 3 replicas ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge -p '{"spec": {"pxc": {"size": 3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.7NcUhHjNBT ++ mktemp + local LAST_ERR=/tmp/tmp.RgzYZLk75s + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge -p '{"spec": {"pxc": {"size": 3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.7NcUhHjNBT perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.RgzYZLk75s + rm /tmp/tmp.7NcUhHjNBT /tmp/tmp.RgzYZLk75s + return 0 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8MN6tvhGGr +++ mktemp ++ local LAST_ERR=/tmp/tmp.WSv2YZOgCB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8MN6tvhGGr ++ cat /tmp/tmp.WSv2YZOgCB ++ rm /tmp/tmp.8MN6tvhGGr /tmp/tmp.WSv2YZOgCB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 0 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RVX9GCuUKz +++ mktemp ++ local LAST_ERR=/tmp/tmp.DJJGz1hfkB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RVX9GCuUKz ++ cat /tmp/tmp.DJJGz1hfkB ++ rm /tmp/tmp.RVX9GCuUKz /tmp/tmp.DJJGz1hfkB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 1 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hJ7Q2k6Eu3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.gB7AWEzXNJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hJ7Q2k6Eu3 ++ cat /tmp/tmp.gB7AWEzXNJ ++ rm /tmp/tmp.hJ7Q2k6Eu3 /tmp/tmp.gB7AWEzXNJ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . 
.+ sleep 5 + [[ 2 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EZa3tOEynq +++ mktemp ++ local LAST_ERR=/tmp/tmp.50lpCmBvrq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EZa3tOEynq ++ cat /tmp/tmp.50lpCmBvrq ++ rm /tmp/tmp.EZa3tOEynq /tmp/tmp.50lpCmBvrq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo -n . .+ sleep 5 + [[ 3 -ge 300 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8ag5I0p95E +++ mktemp ++ local LAST_ERR=/tmp/tmp.6sncxnF5EI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8ag5I0p95E ++ cat /tmp/tmp.6sncxnF5EI ++ rm /tmp/tmp.8ag5I0p95E /tmp/tmp.6sncxnF5EI ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.haBMzBFoAz +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZE3G9IlTpt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.haBMzBFoAz ++ cat /tmp/tmp.ZE3G9IlTpt ++ rm /tmp/tmp.haBMzBFoAz /tmp/tmp.ZE3G9IlTpt ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.lSjPNbCmRy ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.fZFciWwcD1 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.lSjPNbCmRy +++++ cat /tmp/tmp.fZFciWwcD1 +++++ rm /tmp/tmp.lSjPNbCmRy /tmp/tmp.fZFciWwcD1 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.kSnAUfB7gO ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.8p7NtO4x5x +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.kSnAUfB7gO +++++ cat /tmp/tmp.8p7NtO4x5x +++++ rm /tmp/tmp.kSnAUfB7gO /tmp/tmp.8p7NtO4x5x +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YwU3fa6SYO +++ mktemp ++ local LAST_ERR=/tmp/tmp.ti5N391kwl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YwU3fa6SYO ++ cat /tmp/tmp.ti5N391kwl ++ rm /tmp/tmp.YwU3fa6SYO /tmp/tmp.ti5N391kwl ++ return 0 + [[ 3 == \3 ]] + echo + sleep 60 + compare_mysql_servers some-name-proxysql-0 mysql-servers-0-scaledown + local pod=some-name-proxysql-0 + local compare_file=mysql-servers-0-scaledown + 
local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-0' check runtime_mysql_servers in some-name-proxysql-0+ compare_mysql_cmd_local mysql-servers-0-scaledown 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + echo + log 'mysql_servers are configured in some-name-proxysql-0: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-29T15:17:28+0000]' mysql_servers are configured in some-name-proxysql-0: OK [2025-12-29T15:17:28+0000] mysql_servers are configured in some-name-proxysql-0: OK + compare_mysql_servers some-name-proxysql-1 mysql-servers-1-scaledown + local pod=some-name-proxysql-1 + local compare_file=mysql-servers-1-scaledown + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-1' check runtime_mysql_servers in some-name-proxysql-1+ compare_mysql_cmd_local mysql-servers-1-scaledown 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + echo + log 'mysql_servers are configured in some-name-proxysql-1: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-29T15:17:29+0000]' mysql_servers are configured in some-name-proxysql-1: OK [2025-12-29T15:17:29+0000] mysql_servers are configured in some-name-proxysql-1: OK + compare_mysql_servers some-name-proxysql-2 mysql-servers-2-scaledown + local pod=some-name-proxysql-2 + local compare_file=mysql-servers-2-scaledown + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-2' check runtime_mysql_servers in some-name-proxysql-2+ compare_mysql_cmd_local mysql-servers-2-scaledown 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-2 + echo + log 'mysql_servers are configured in some-name-proxysql-2: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-29T15:17:31+0000]' mysql_servers are configured in some-name-proxysql-2: OK [2025-12-29T15:17:31+0000] mysql_servers are configured in some-name-proxysql-2: OK + desc 'check PXC pod 0 is reader and writer when pods 1 and 2 are down' + set +o xtrace ----------------------------------------------------------------------------------- check PXC pod 0 is reader and writer when pods 1 and 2 are down ----------------------------------------------------------------------------------- + log 'scaling PXC down to 1 replica' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-29T15:17:31+0000]' scaling PXC down to 1 replica [2025-12-29T15:17:31+0000] scaling PXC down to 1 replica + kubectl scale sts/some-name-pxc --replicas=1 statefulset.apps/some-name-pxc scaled + sleep 20 + compare_mysql_servers some-name-proxysql-0 mysql-servers-0-two-pod-down + local pod=some-name-proxysql-0 + local compare_file=mysql-servers-0-two-pod-down + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 
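
For the "pods 1 and 2 are down" scenario the test bypasses the custom resource and scales the PXC StatefulSet directly, so only pod 0 keeps running while the operator still wants 3; the *-two-pod-down compare files then verify the scheduler keeps some-name-pxc-0 registered as both writer and reader. A sketch of the manual scale plus a quick pod check (the label selector is an assumption, not taken from the trace):

# Simulate two PXC pods going down without touching the custom resource (sketch).
kubectl scale sts/some-name-pxc --replicas=1
sleep 20
# Only some-name-pxc-0 should remain running (selector is illustrative):
kubectl get pods -l app.kubernetes.io/component=pxc
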
16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-0' check runtime_mysql_servers in some-name-proxysql-0+ compare_mysql_cmd_local mysql-servers-0-two-pod-down 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + sleep 10 + echo -n . .+ let retry+=1 + [[ 1 -ge 30 ]] + compare_mysql_cmd_local mysql-servers-0-two-pod-down 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + sleep 10 + echo -n . .+ let retry+=1 + [[ 2 -ge 30 ]] + compare_mysql_cmd_local mysql-servers-0-two-pod-down 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + sleep 10 + echo -n . .+ let retry+=1 + [[ 3 -ge 30 ]] + compare_mysql_cmd_local mysql-servers-0-two-pod-down 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + sleep 10 + echo -n . .+ let retry+=1 + [[ 4 -ge 30 ]] + compare_mysql_cmd_local mysql-servers-0-two-pod-down 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + sleep 10 + echo -n . .+ let retry+=1 + [[ 5 -ge 30 ]] + compare_mysql_cmd_local mysql-servers-0-two-pod-down 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + echo + log 'mysql_servers are configured in some-name-proxysql-0 when 2 pods are down: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-29T15:18:52+0000]' mysql_servers are configured in some-name-proxysql-0 when 2 pods are down: OK [2025-12-29T15:18:52+0000] mysql_servers are configured in some-name-proxysql-0 when 2 pods are down: OK + compare_mysql_servers some-name-proxysql-1 mysql-servers-1-two-pod-down + local pod=some-name-proxysql-1 + local compare_file=mysql-servers-1-two-pod-down + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-1' check runtime_mysql_servers in some-name-proxysql-1+ compare_mysql_cmd_local mysql-servers-1-two-pod-down 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + echo + log 'mysql_servers are configured in some-name-proxysql-1 when 2 pods are down: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-29T15:18:54+0000]' mysql_servers are configured in some-name-proxysql-1 when 2 pods are down: OK [2025-12-29T15:18:54+0000] mysql_servers are configured in some-name-proxysql-1 when 2 pods are down: OK + compare_mysql_servers some-name-proxysql-2 mysql-servers-2-two-pod-down + local pod=some-name-proxysql-2 + local compare_file=mysql-servers-2-two-pod-down + local loop_pid= + local 'query=SELECT hostgroup_id,SUBSTRING(hostname, 0, 
16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local retry=0 + echo -n 'check runtime_mysql_servers in some-name-proxysql-2' check runtime_mysql_servers in some-name-proxysql-2+ compare_mysql_cmd_local mysql-servers-2-two-pod-down 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-2 + echo + log 'mysql_servers are configured in some-name-proxysql-2 when 2 pods are down: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-29T15:18:56+0000]' mysql_servers are configured in some-name-proxysql-2 when 2 pods are down: OK [2025-12-29T15:18:56+0000] mysql_servers are configured in some-name-proxysql-2 when 2 pods are down: OK + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + destroy proxysql-scheduler-7615 + local namespace=proxysql-scheduler-7615 + local ignore_logs=true + [[ 0 == 1 ]] + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v 'the object has been modified' + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + sort -u + tee /tmp/tmp.qG7R1JVFpB/operator.log + grep -v 'get backup status: Job.batch' + grep -v level=info ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator ++ jq -r '.items[] | select(.metadata.deletionTimestamp == null) | .metadata.name' ++ head -1 +++ mktemp ++ local LAST_OUT=/tmp/tmp.Y3bJ3JVVSZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.cfeJKPl2OV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Y3bJ3JVVSZ ++ cat /tmp/tmp.cfeJKPl2OV ++ rm /tmp/tmp.Y3bJ3JVVSZ /tmp/tmp.cfeJKPl2OV ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-65b6f89d5d-hztxk ++ mktemp + local LAST_OUT=/tmp/tmp.YM13D87W1v ++ mktemp + local LAST_ERR=/tmp/tmp.o0EUpfWKta + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-65b6f89d5d-hztxk + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.YM13D87W1v + cat /tmp/tmp.o0EUpfWKta + rm /tmp/tmp.YM13D87W1v /tmp/tmp.o0EUpfWKta + return 0 2025-12-29T14:52:51.596Z INFO setup Feature gates {"PXCO_FEATURE_GATES": "", "enabled": ""} 2025-12-29T14:52:51.596Z INFO setup Manager starting up {"gitCommit": "8293f0719903b3ac6d61887112eb49702bd931a3", "gitBranch": "PR-2330-8293f071", "buildTime": "2025-12-29T12:59:03Z", "goVersion": "go1.25.5", "os": "linux", "arch": "amd64"} 2025-12-29T14:52:51.596Z INFO setup Runs on {"platform": "kubernetes", "version": 
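
Before tearing the namespace down, the destroy step collects the operator log: it locates the running operator pod by label, dumps its logs, and strips noisy lines. A compact sketch of that pipeline, abridged to the main filters seen in the trace (the output path is illustrative):

# Grab and lightly filter the operator log for post-mortem (sketch).
ns=pxc-operator
pod=$(kubectl get pods -n "$ns" \
        --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator \
        --field-selector=status.phase=Running -o json \
      | jq -r '.items[] | select(.metadata.deletionTimestamp == null) | .metadata.name' | head -1)
kubectl logs -n "$ns" "$pod" \
  | grep -v 'the object has been modified' \
  | grep -v 'get backup status: Job.batch' \
  | sed -r 's/"ts":[0-9.]+//' \
  | sort -u | tee /tmp/operator.log
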
"v1.31.14-gke.1156000"} 2025-12-29T14:52:51.599Z INFO setup Registering Components. 2025-12-29T14:52:52.392Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2025-12-29T14:52:52.393Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"} 2025-12-29T14:52:52.393Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"} 2025-12-29T14:52:52.393Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2025-12-29T14:52:52.393Z INFO controller-runtime.metrics Starting metrics server 2025-12-29T14:52:52.393Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2025-12-29T14:52:52.393Z INFO controller-runtime.webhook Starting webhook server 2025-12-29T14:52:52.393Z INFO setup Starting the Cmd. 2025-12-29T14:52:52.393Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2025-12-29T14:52:52.493Z INFO Attempting to acquire leader lease... {"lock": "pxc-operator/08db1feb.percona.com"} 2025-12-29T14:52:52.538Z DEBUG events percona-xtradb-cluster-operator-65b6f89d5d-hztxk_e993c41c-4fd5-4be0-aeed-5e4301c9f6b2 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"acb51f33-be97-45d1-abf9-a95abbe4a00d","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1767019972522223009"}, "reason": "LeaderElection"} 2025-12-29T14:52:52.538Z INFO Starting EventSource {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "source": "kind source: *v1.Secret"} 2025-12-29T14:52:52.538Z INFO Successfully acquired lease {"lock": "pxc-operator/08db1feb.percona.com"} 2025-12-29T14:52:52.539Z INFO Starting EventSource {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2025-12-29T14:52:52.539Z INFO Starting EventSource {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "source": "kind source: *v1.PerconaXtraDBCluster"} 2025-12-29T14:52:52.539Z INFO Starting EventSource {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2025-12-29T14:52:52.640Z INFO Starting Controller {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup"} 2025-12-29T14:52:52.640Z INFO Starting Controller {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster"} 2025-12-29T14:52:52.640Z INFO Starting workers {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup", "worker count": 1} 2025-12-29T14:52:52.640Z INFO Starting workers {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "worker count": 1} 2025-12-29T14:52:52.740Z INFO Starting Controller {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": 
"PerconaXtraDBClusterRestore"} 2025-12-29T14:52:52.740Z INFO Starting workers {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore", "worker count": 1} 2025-12-29T14:53:38.816Z INFO Set CR version {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "96d7eae3-4d9b-4395-bdf8-afdf70511c09", "version": "1.19.0"} 2025-12-29T14:53:39.128Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "96d7eae3-4d9b-4395-bdf8-afdf70511c09", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-12-29T14:53:39.144Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "96d7eae3-4d9b-4395-bdf8-afdf70511c09", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-12-29T14:53:39.277Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "96d7eae3-4d9b-4395-bdf8-afdf70511c09", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-12-29T14:53:39.306Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "96d7eae3-4d9b-4395-bdf8-afdf70511c09", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-12-29T14:53:39.350Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "96d7eae3-4d9b-4395-bdf8-afdf70511c09", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-29T14:53:39.375Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "96d7eae3-4d9b-4395-bdf8-afdf70511c09", "object": "some-name-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-29T14:53:39.398Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", 
"name": "some-name", "reconcileID": "96d7eae3-4d9b-4395-bdf8-afdf70511c09", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-29T14:53:39.478Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "96d7eae3-4d9b-4395-bdf8-afdf70511c09", "object": "some-name-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-29T14:53:40.270Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "61e1ad56-fd8d-4dae-a03b-774da6499958", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-12-29T14:53:40.290Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "61e1ad56-fd8d-4dae-a03b-774da6499958", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-12-29T14:54:50.942Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "966dfce2-5103-49a2-bc81-b5e10ac98c89", "user": "operator"} 2025-12-29T14:54:50.969Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "966dfce2-5103-49a2-bc81-b5e10ac98c89", "user": "monitor"} 2025-12-29T14:54:51.010Z INFO User monitor: granted privileges {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "966dfce2-5103-49a2-bc81-b5e10ac98c89"} 2025-12-29T14:54:51.038Z INFO monitor user privileges granted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "966dfce2-5103-49a2-bc81-b5e10ac98c89"} 2025-12-29T14:54:51.063Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "966dfce2-5103-49a2-bc81-b5e10ac98c89", "user": "xtrabackup"} 2025-12-29T14:54:51.098Z INFO User xtrabackup: granted privileges {"controller": 
"pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "966dfce2-5103-49a2-bc81-b5e10ac98c89"} 2025-12-29T14:54:51.123Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "966dfce2-5103-49a2-bc81-b5e10ac98c89", "user": "replication"} 2025-12-29T14:54:51.130Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "966dfce2-5103-49a2-bc81-b5e10ac98c89", "err": "get primary pxc pod: not found"} 2025-12-29T14:54:55.904Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "292f636d-8083-47fc-a539-25d132a945d8", "err": "get primary pxc pod: not found"} 2025-12-29T14:55:00.993Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "f311e694-9a50-4eab-9011-93408a231bc5", "err": "get primary pxc pod: not found"} 2025-12-29T14:57:20.984Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "1d749a0d-fefd-4b5c-8a44-6df6bf35d585", "user": "root"} 2025-12-29T14:57:21.108Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "1d749a0d-fefd-4b5c-8a44-6df6bf35d585", "new version": "8.0.43-34.1"} 2025-12-29T14:57:23.541Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "1d749a0d-fefd-4b5c-8a44-6df6bf35d585"} 2025-12-29T14:57:28.242Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "53037851-9307-4f61-abf9-c1de15979909"} 2025-12-29T14:57:33.666Z DEBUG PXC users synced with ProxySQL 
{"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "49730980-49de-41a3-bc92-dc56e020cfc3"} 2025-12-29T14:57:39.122Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "d2518ce2-8e87-4c91-abe4-b5903e5c7fa6"} 2025-12-29T14:57:44.154Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "2bc1cd60-a0e3-4100-b743-6bc74e16978f"} 2025-12-29T14:57:49.615Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "cc409ff1-9f82-4737-8513-a7231bc7e368"} 2025-12-29T14:57:55.019Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "5b5084d6-95d2-492a-b866-53fc70d3ee94"} 2025-12-29T14:58:00.116Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "0c21f525-736b-4cf5-a6b7-4b46d0c05212"} 2025-12-29T14:58:05.544Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "35fd1bdf-f976-40ad-8fea-40254268e871"} 2025-12-29T14:58:10.750Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "0fd3b522-a2a7-4b8b-8692-a7dc476ae1f7"} 2025-12-29T14:58:15.869Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "404a4780-bf26-4eb6-bd87-e16364117a4d"} 2025-12-29T14:58:21.227Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "f140155c-eef6-48d9-85f8-0e8d3e857f11"} 2025-12-29T14:58:26.430Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "0e8fd323-1767-43a0-bfd8-eabaf5f6602a"} 2025-12-29T14:58:31.799Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "0e0d1999-c5a2-485d-afd8-2a9c94f7c784"} 2025-12-29T14:58:37.120Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "703665df-1bce-44b6-be2b-7673de1f0e6b"} 2025-12-29T14:58:41.841Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "bda413ff-302e-40e1-8d33-fd3fd972e97c"} 2025-12-29T14:58:47.548Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "ea71b530-00f7-4eaf-addf-864cbef9defe"} 2025-12-29T14:58:52.657Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "e41f62d1-1c91-495e-b59b-6208ab30327b"} 2025-12-29T14:58:58.244Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "d2e2a49d-c8dd-41f7-8ec0-833f5c87f361"} 2025-12-29T14:59:03.424Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "9accc42e-9366-4489-8f3b-2619330ab5ee"} 2025-12-29T14:59:08.446Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "b043d966-ae26-47e0-8d5a-aeffde46dff7"} 
2025-12-29T14:59:13.850Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "e864a860-9603-467a-ad52-58e1dfccf415"} 2025-12-29T14:59:19.136Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "e9a79ed8-5954-4347-b010-855634ce891d"} 2025-12-29T14:59:23.870Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "4937090d-8759-49fd-8e1e-9e83b4ec14c9"} 2025-12-29T14:59:29.517Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "cf8ed60c-cb3b-4efa-8606-f5b3ed2eecd8"} 2025-12-29T14:59:34.724Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "ed903239-78a7-4c6f-aa10-58bcf142fdf9"} 2025-12-29T14:59:39.952Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "b11fee9e-1cb3-400d-903d-990a4e72826b"} 2025-12-29T14:59:45.053Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "e9e1bc29-2e2f-44ba-b593-f09d6ea15e51"} 2025-12-29T14:59:50.473Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "adfb8ea8-bef3-4534-8423-ed11b3c3d66f"} 2025-12-29T14:59:55.467Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "5abbb5a5-a249-4f6e-8c9c-272420f7f6e5"} 2025-12-29T15:00:00.925Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", 
"PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "3571042c-ec30-4ca9-8676-39c146330442"} 2025-12-29T15:00:06.219Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "5ebdbb48-66ba-4e8d-9cc7-ad3b13ef8485"} 2025-12-29T15:00:11.215Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "d13d233e-37df-43b8-898c-6bfdbcd6b6ef"} 2025-12-29T15:00:16.657Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "cb97613e-b92d-4c7d-b8db-9d6a5740ddb8"} 2025-12-29T15:00:21.831Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "1dcb07cc-d83d-4f6f-86ab-1e1688bfa44f"} 2025-12-29T15:00:27.320Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "b13c30f5-5bb2-4b41-b685-180e0fa2d14d"} 2025-12-29T15:00:32.276Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "4071ac42-b37b-40c3-b33a-1ecf3410f557"} 2025-12-29T15:00:37.772Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "b7700ed0-f65a-4d54-aa01-81cdfb0b510b"} 2025-12-29T15:00:42.958Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "c4981938-3910-4131-862a-a88f13a2e0c7"} 2025-12-29T15:00:48.131Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": 
"27a20449-9748-415a-922e-34bbf677af99"} 2025-12-29T15:00:53.520Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "27be8661-edb0-4a11-8fa7-7b3b3e4c480b"} 2025-12-29T15:00:58.824Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "fc79d525-df18-41c7-9733-04a6dc966581"} 2025-12-29T15:01:04.126Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "8f577455-b4c2-4a6c-a59a-c0766430a1c1"} 2025-12-29T15:01:09.232Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "5823d1bc-7aa2-4913-b2b4-4556c105a4fe"} 2025-12-29T15:01:14.742Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "f6a4cb3b-51b1-4335-88a5-3d3914a74074"} 2025-12-29T15:01:20.126Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "e3f7d918-1183-4ee7-b01f-613c6ed6d9b4"} 2025-12-29T15:01:21.575Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "9b86312f-3484-4ca9-a3a2-fbee9f335cb0", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-29T15:01:21.619Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "9b86312f-3484-4ca9-a3a2-fbee9f335cb0", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-29T15:01:24.187Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", 
"name": "some-name", "reconcileID": "9b86312f-3484-4ca9-a3a2-fbee9f335cb0", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\nERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\nERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\nERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\nERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\nERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\nERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\nERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\nERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-29T15:02:04.107Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "2185a78c-ec4c-4e93-9698-f3f1deacf7fe", "err": "get primary pxc pod: not found"} 2025-12-29T15:02:07.305Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "ed90eae1-eab3-4e80-8a3b-5d7608c53751", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\nERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\nERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\nERROR (line:1290) : Could not find a cluster 
(with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\nERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\nERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\nERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\nERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\nERROR (line:1290) : Could not find a cluster (with writer_hostgroup:11) to update\nERROR (line:572) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-29T15:02:22.802Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "db2e63df-e186-4806-aa93-047bb723e640"} 2025-12-29T15:02:27.790Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "68f97d9e-824a-4b06-9554-52ab3cfdafa0"} 2025-12-29T15:02:33.180Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "cf610f17-3a80-42cf-be0a-f021459d2011"} 2025-12-29T15:02:38.383Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "a5de9a81-a1a9-4b7b-b1e7-09f2d95a64a3"} 2025-12-29T15:02:43.617Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "a8c5edb0-c14e-428f-b709-7d81ec371123"} 2025-12-29T15:02:49.105Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", 
"controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "50fcd4bc-bf83-4b66-bfe9-3ef767f49c7c"} 2025-12-29T15:02:54.207Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "266335db-2e6c-4b30-8331-7170f8a9b469"} 2025-12-29T15:02:59.503Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "ec66c35f-68b3-4030-aa7f-992ec9f4bada"} 2025-12-29T15:03:04.685Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "f929a25b-fa34-44e4-b2f8-fa42c0aa0bb2"} 2025-12-29T15:03:09.794Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "548d74ac-31e5-46c4-891d-f51c096786b2"} 2025-12-29T15:03:15.307Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "29d2ee25-38e2-42cb-b3e9-fff5507a8e51"} 2025-12-29T15:03:20.496Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "89443b36-9c3a-46fc-b886-36cc286e2ef1"} 2025-12-29T15:03:25.699Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "17636173-8bdc-4d33-80ef-e0cb8fb63164"} 2025-12-29T15:03:30.931Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "801cd239-ab18-4dcf-894d-48029cde28c8"} 2025-12-29T15:03:36.212Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": 
"proxysql-scheduler-7615", "name": "some-name", "reconcileID": "2f63798d-dc20-4831-b7a1-17a9f06fa89c"} 2025-12-29T15:03:41.519Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "7fa6d74f-920b-44f0-bc50-4e238d181d25"} 2025-12-29T15:03:46.881Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "92838dca-76fd-4230-8788-24a4930c95e9"} 2025-12-29T15:03:51.818Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "104927aa-4b12-421b-b988-72ac70b93b62"} 2025-12-29T15:03:57.040Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "c4485619-4f90-498f-8e52-7cec06844848", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-29T15:03:57.080Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "c4485619-4f90-498f-8e52-7cec06844848", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-29T15:03:58.170Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "d78e9e49-b6b9-4672-91e2-bf1d1967891a", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.proxysql-scheduler-7615.svc.cluster.local:3306) to ProxySQL\n / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.proxysql-scheduler-7615.svc.cluster.local:3306) to ProxySQL\n / 
\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-29T15:04:38.056Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "57490b51-d7aa-43c7-b733-ff196323958b", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 10.34.154.38:6032: i/o timeout"} 2025-12-29T15:04:43.265Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "ddc16aea-acbb-4f00-a561-8c88ba8cb5f1", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-29T15:04:48.162Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "00d0f165-47be-4adb-871d-094e3dad9f87", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has 
not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-29T15:04:53.583Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "507c459d-7c74-4e86-944b-ee20705f42b9", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in 
ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-29T15:04:57.443Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "2ebbad99-532d-4c33-b63d-6ba08fdf3e5d", "error": "syncusers: ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "syncusers: ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:979\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-29T15:05:02.036Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "4f3f439e-40ae-42ab-9910-00bae0ffc1cf"} 2025-12-29T15:05:07.160Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "12fd2059-0836-42e6-b15a-f3d08c18b2df"} 2025-12-29T15:05:12.574Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "9dda9cea-dfae-4e2c-8b18-d93bab76049c"} 2025-12-29T15:05:17.763Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "292f2253-cdee-4b55-b610-0a63334e12b6"} 2025-12-29T15:05:22.946Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "7642e2ec-9c74-4518-ada0-ba0a9b4ed158"} 2025-12-29T15:05:28.190Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "63c6813c-06dc-4eed-921c-144bb55fe531"} 2025-12-29T15:05:33.373Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "81204169-5a2a-4ad1-93e0-e37042f35b82"} 2025-12-29T15:05:38.573Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "42e353a0-30dd-45ce-80a8-e59677fa7353"} 2025-12-29T15:05:43.863Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "2c6e0cc2-e8bb-4532-92f9-f2dc3976264a"} 2025-12-29T15:05:49.572Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "f34504e2-4549-42b9-9dc2-057c1170033f"} 2025-12-29T15:05:58.023Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "5f6286e3-24a2-4127-bb58-83d9d7dabd30", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.34.152.37:33062: connect: connection refused"} 2025-12-29T15:06:03.146Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "295c4e39-8b4c-401e-a5b8-d76f3ffad6e0", "primary name": "some-name-pxc-0.some-name-pxc.proxysql-scheduler-7615.svc.cluster.local"} 2025-12-29T15:06:03.307Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "b3d1f092-7432-48e8-a47c-9e42beaf7d49", "primary name": "some-name-pxc-0.some-name-pxc.proxysql-scheduler-7615.svc.cluster.local"} 2025-12-29T15:06:08.295Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "00d3906b-85dc-4d32-a331-69d1283e13bd", "primary name": "some-name-pxc-0.some-name-pxc.proxysql-scheduler-7615.svc.cluster.local"} 2025-12-29T15:06:37.167Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "23416de6-9520-4326-8a84-19a13b71a61b"} 2025-12-29T15:06:42.135Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "246dc85b-5bdf-4a8a-8110-8a3bc1d45cc5"} 2025-12-29T15:06:46.387Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "94b86fed-ca14-4524-b483-82dc2060bfda", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-29T15:06:46.434Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "94b86fed-ca14-4524-b483-82dc2060bfda", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-29T15:06:47.146Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "291829cf-b8a5-4dba-be4e-1fc07fa72abb"} 2025-12-29T15:09:20.178Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "93293e60-65d3-4ecc-baee-c1274186c9aa"} 2025-12-29T15:09:24.488Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": 
"pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "3fa98ec0-bd18-436f-9534-652a92efd71d"} 2025-12-29T15:09:29.874Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "5e479046-1724-451d-9c9c-3dbebf57cdd8"} 2025-12-29T15:09:35.051Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "374cb36e-b95c-4bfb-8a34-4df47a29305d"} 2025-12-29T15:09:40.276Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "e9e28908-eb00-4ffc-bb0f-23b98483054b"} 2025-12-29T15:09:45.840Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "56a6a815-b796-42b3-a508-b338538421b7"} 2025-12-29T15:09:51.462Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "78c0a772-0d56-41f4-9f43-13fb7a3550a2"} 2025-12-29T15:09:56.646Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "8497be5b-8d47-4e3c-9713-0a1b26f3f40b"} 2025-12-29T15:10:01.973Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "d233edf7-1fec-4175-ba27-60cbaed8e7d3"} 2025-12-29T15:10:07.050Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "fce5a788-0744-4e08-a67e-314bb88b69b0"} 2025-12-29T15:10:12.673Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": 
"proxysql-scheduler-7615", "name": "some-name", "reconcileID": "22902805-78ab-4770-b968-25a996c04ff4"} 2025-12-29T15:10:17.981Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "4f190ebf-98d2-44f2-93d5-1c74b2a330c0"} 2025-12-29T15:10:23.287Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "75c4d60e-c14f-4857-9a4b-f4df153d5acd"} 2025-12-29T15:10:28.654Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "0a74d819-a9f9-45dc-9dff-6e0992d120cb"} 2025-12-29T15:10:34.486Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "34c1068c-40dc-4578-8d04-b149898d8069"} 2025-12-29T15:10:39.672Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "a6709e1a-ffdc-40fe-93cb-e0ca26762500"} 2025-12-29T15:10:45.144Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "27931919-c127-4f9d-a76c-36675ceecdbe"} 2025-12-29T15:10:50.273Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "e46c2176-95bf-4d09-bd0f-28e36ab68e1e"} 2025-12-29T15:10:55.492Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "c61cd95a-fe1d-4df5-81bc-5fa81a993e78"} 2025-12-29T15:11:00.975Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "caf7a2e9-11cf-418e-8776-0913b8c16ccd"} 2025-12-29T15:11:06.541Z DEBUG PXC users synced with ProxySQL {"controller": 
"pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "23f074f8-795b-4f01-a97d-926f7e43b69b"} 2025-12-29T15:11:11.779Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "b76b76ca-d420-439c-b1bc-5c207cecdad8"} 2025-12-29T15:11:17.162Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "bee714f6-bbd0-40f5-928c-9f0517dcc98e"} 2025-12-29T15:11:21.677Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "9b6fb865-f5b3-43eb-ab1c-7ff6b94e6079", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-29T15:11:21.727Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "9b6fb865-f5b3-43eb-ab1c-7ff6b94e6079", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-29T15:11:22.646Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "b3af88e2-181a-47d0-845b-41552128ef7f"} 2025-12-29T15:11:51.511Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "4703a7b9-17e7-46d0-bb0d-e1880285f1f1", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR 
(line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-29T15:11:59.456Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "d070542d-93f2-4fa9-b5f9-cea6f726cedd", "error": "syncusers: ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "syncusers: ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:979\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-29T15:12:08.416Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "bb78007b-e4c4-4b0c-81d5-217ec6c656a0"} 2025-12-29T15:12:13.831Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "606ba35f-c1c2-40fa-a9b7-49149e2d5233"} 2025-12-29T15:12:19.215Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "6ced1bb4-25ed-4543-972a-d79a04d7d723"} 2025-12-29T15:12:24.402Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "ec2ff770-5f27-4db9-98ac-052e97ffd597"} 2025-12-29T15:12:30.027Z DEBUG PXC 
users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "e54b5477-85fa-4e7c-9b9b-b355bbba9529"} 2025-12-29T15:12:34.892Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "02b0677d-d0d8-49ef-840f-4e3103b1c4f4"} 2025-12-29T15:12:40.499Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "2233b888-06b9-4dc2-9bb6-754cebb62315"} 2025-12-29T15:12:45.597Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "b04eb8d7-11d3-4576-afbf-8c1ad8b46710"} 2025-12-29T15:12:51.098Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "679fcb27-0c5e-4dc7-8d33-af7188f820f4"} 2025-12-29T15:12:56.180Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "a1f7ba30-b6c2-4dfa-864e-64784cd435e0"} 2025-12-29T15:13:01.414Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "2f31740d-5da7-4fcf-93ba-575daf2da479"} 2025-12-29T15:13:06.685Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "001818a3-b83a-43ec-b0a9-52f94fee1370"} 2025-12-29T15:13:12.337Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "dad96b0f-e12b-4723-9acf-76c24d9e01fc"} 2025-12-29T15:13:17.428Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "80074c2f-3a83-4954-a223-713a6ea6b9e4"} 2025-12-29T15:13:23.033Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "c9b4d476-92c8-4a42-a9a9-b5733edb3703"} 2025-12-29T15:13:28.312Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "36e4e33b-50c5-44b5-a77b-91c1d08710a9"} 2025-12-29T15:13:33.702Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "b12be298-ee2e-42b4-ab3d-22d4aa0f0bd1"} 2025-12-29T15:13:38.601Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "d9f669e4-68a7-477f-847e-45391e295531"} 2025-12-29T15:13:44.234Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "ead06755-a618-4893-a09f-b28ece70f155"} 2025-12-29T15:13:49.685Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "dc20bd0c-2da4-4947-a007-3d9781522e7f"} 2025-12-29T15:13:50.511Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "c8482a40-a39d-43d7-829b-8084c3040159", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-29T15:13:50.550Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "c8482a40-a39d-43d7-829b-8084c3040159", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-29T15:13:53.607Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": 
"PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "c8482a40-a39d-43d7-829b-8084c3040159", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-29T15:14:40.559Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "45be8731-61d5-4722-a112-6fcaeaf21694"} 2025-12-29T15:14:45.643Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "537bc55e-2b9a-4dbc-8cf3-5a0eff733589"} 2025-12-29T15:14:58.347Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "b6f84ea5-f44f-4171-ac92-9345c34329a8"} 2025-12-29T15:15:03.833Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "9c5f04fc-aeea-4152-a2ab-db023e28a8dc"} 2025-12-29T15:15:09.280Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "76bcb392-11b9-4356-984a-9f8559f4e400"} 2025-12-29T15:15:14.332Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "7f68769c-327d-407b-aec6-ca14825f148b"} 2025-12-29T15:15:19.740Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": 
"proxysql-scheduler-7615", "name": "some-name", "reconcileID": "ff48bf41-c606-4152-884f-6ef693ac7105"} 2025-12-29T15:15:25.181Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "8255f307-74e8-481f-83fa-f86411dd3ff5"} 2025-12-29T15:15:30.730Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "35c4d803-4e08-4fb7-8f8d-d9cd69a8b83c"} 2025-12-29T15:15:36.062Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "e6df209b-bf27-462d-ae9f-0280f57e9ed1"} 2025-12-29T15:15:41.349Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "9ca9d67c-bd33-430f-9bd7-c784eb2a5a40"} 2025-12-29T15:15:46.435Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "65789a25-c6f8-440c-ab18-30e059fcd87a"} 2025-12-29T15:15:49.912Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "cfd379d2-6d7a-4e09-9588-82772c9645a0", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-29T15:15:49.958Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "cfd379d2-6d7a-4e09-9588-82772c9645a0", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-29T15:15:52.253Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "89cffae9-de9e-4fa4-ba27-de335c6b9848"} 2025-12-29T15:15:58.856Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "5b09d0bb-88d9-4b17-bcaf-1fa06c25de33", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-4: dial tcp: lookup some-name-pxc-4.some-name-pxc.proxysql-scheduler-7615 on 34.118.224.10:53: no such host"} 2025-12-29T15:16:04.158Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "59a5ace6-86da-42cc-8fc0-76e670c7e95d", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-4: dial tcp 10.34.154.40:33062: connect: connection refused"} 2025-12-29T15:16:14.586Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "a67dba9a-aad2-40b1-a62e-16bebf02f9f4", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-3: dial tcp: lookup some-name-pxc-3.some-name-pxc.proxysql-scheduler-7615 on 34.118.224.10:53: no such host"} 2025-12-29T15:16:25.258Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "259199e4-7d6e-4b77-b0ba-3d47dfa004b6"} 2025-12-29T15:16:33.933Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "819350af-c11c-4dfd-bb8b-39d7a0782600"} 2025-12-29T15:16:39.357Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "eb861aec-a42a-491e-9cd9-b3383b0124cb"} 2025-12-29T15:16:44.456Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "5dfd04f3-0307-4933-a811-3a1690050654"} 2025-12-29T15:16:49.735Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "40291467-9d21-4518-90c4-2874cafdadec"} 2025-12-29T15:16:54.940Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "abe84752-3f9e-4b72-a242-665088fbcbfd"} 2025-12-29T15:17:00.168Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "6d0c5867-83ad-4334-9eb5-c530f2a7a76b"} 2025-12-29T15:17:05.656Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "8dd984b1-4ac5-4df0-b8c1-01b681269521"} 2025-12-29T15:17:10.785Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "647537da-cd42-4342-967d-2dd71698b378"} 2025-12-29T15:17:16.365Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "703c4699-ada8-452a-9319-37596ee04d41"} 2025-12-29T15:17:21.557Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "d65b0bea-c5be-4c74-8cf3-53ed1977ffc3"} 2025-12-29T15:17:26.738Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "d22cb3d1-a726-44df-ab4d-87c12b4526ba"} 2025-12-29T15:17:32.076Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "b374b134-d442-4926-983b-464cd9c7eaa5"} 2025-12-29T15:17:37.037Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "d976888e-ce12-431d-b320-bbe7a26e5a84"} 2025-12-29T15:17:39.349Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "130afcab-3501-4e95-905d-3f54d6defc0b", "err": 
"failed to ensure cluster readonly status: connect to pod some-name-pxc-2: dial tcp: lookup some-name-pxc-2.some-name-pxc.proxysql-scheduler-7615 on 34.118.224.10:53: no such host"} 2025-12-29T15:17:44.557Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "0ca109fc-5ca4-431c-9a78-c3cf9493a270", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-2: dial tcp: lookup some-name-pxc-2.some-name-pxc.proxysql-scheduler-7615 on 34.118.224.10:53: no such host"} 2025-12-29T15:18:00.301Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-7615"}, "namespace": "proxysql-scheduler-7615", "name": "some-name", "reconcileID": "ba0be7d5-6bf7-4be5-91c5-a72645f8b2e3", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp 10.34.154.37:33062: connect: connection refused"} github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:857 -  }, -  { -  }, -  { -  }, -  }, +  }, +  { +  }, +  }, -  Annotations: map[string]string{ +  Annotations: map[string]string{ -  APIVersion: "apps/v1", -  APIVersion: "apps/v1", -  Args: []string{ +  Args: []string{ +  AvailableReplicas: 0, -  AvailableReplicas: 2, -  AvailableReplicas: 3, -  AvailableReplicas: 5, -  CollisionCount: &0, +  CollisionCount: nil, -  Command: []string{"/opt/percona/proxysql-entrypoint.sh"}, +  Command: []string{"/opt/percona/proxysql-entrypoint.sh"}, +  CreationTimestamp: v1.Time{}, -  CreationTimestamp: v1.Time{Time: s"2025-12-29 14:53:39 +0000 UTC"}, +  CurrentReplicas: 0, -  CurrentReplicas: 2, -  CurrentReplicas: 3, -  CurrentReplicas: 5, +  CurrentRevision: "", -  CurrentRevision: "some-name-proxysql-669d589b5b", -  CurrentRevision: "some-name-proxysql-75c5fd5d74", -  CurrentRevision: "some-name-pxc-7f48d99997", -  DefaultMode: &420, -  DefaultMode: &420, +  DefaultMode: nil, +  DefaultMode: nil, +  DeprecatedServiceAccount: "", -  DeprecatedServiceAccount: "default", +  DNSPolicy: "", -  DNSPolicy: "ClusterFirst", -  EnvFrom: []v1.EnvFromSource{{SecretRef: s"&SecretEnvSource{LocalObjectRefe"...}}, +  EnvFrom: []v1.EnvFromSource{{SecretRef: s"&SecretEnvSource{LocalObjectRefe"...}}, -  Env: []v1.EnvVar{ +  Env: []v1.EnvVar{ -  FieldsType: "FieldsV1", -  FieldsType: "FieldsV1", -  FieldsV1: s`{"f:metadata":{"f:annotations":{".":{},"f:percona.com/last-confi`..., -  FieldsV1: s`{"f:status":{"f:availableReplicas":{},"f:collisionCount":{},"f:c`..., +  Generation: 0, -  Generation: 1, -  Generation: 2, -  Generation: 3, -  Generation: 4, -  Image: "perconalab/percona-xtradb-cluster-operator:main-proxysql", +  Image: "perconalab/percona-xtradb-cluster-operator:main-proxysql", -  ImagePullPolicy: "Always", +  ImagePullPolicy: "Always", +  ManagedFields: nil, -  ManagedFields: []v1.ManagedFieldsEntry{ -  Manager: "kube-controller-manager", -  Manager: "percona-xtradb-cluster-operator", -  {Name: "MONITOR_PASSWORD", ValueFrom: s"&EnvVarSource{FieldRef:nil,Resou"...}, +  {Name: "MONITOR_PASSWORD", ValueFrom: 
s"&EnvVarSource{FieldRef:nil,Resou"...}, -  {Name: "OPERATOR_PASSWORD", ValueFrom: s"&EnvVarSource{FieldRef:nil,Resou"...}, +  {Name: "OPERATOR_PASSWORD", ValueFrom: s"&EnvVarSource{FieldRef:nil,Resou"...}, -  {Name: "PROXY_ADMIN_PASSWORD", ValueFrom: s"&EnvVarSource{FieldRef:nil,Resou"...}, +  {Name: "PROXY_ADMIN_PASSWORD", ValueFrom: s"&EnvVarSource{FieldRef:nil,Resou"...}, -  {Name: "PROXY_ADMIN_USER", Value: "proxyadmin"}, +  {Name: "PROXY_ADMIN_USER", Value: "proxyadmin"}, -  Name: "proxysql-monit", +  Name: "proxysql-monit", -  {Name: "PROXYSQL_SERVICE", Value: "some-name-proxysql-unready"}, +  {Name: "PROXYSQL_SERVICE", Value: "some-name-proxysql-unready"}, -  {Name: "SCHEDULER_ENABLED", Value: "true"}, +  {Name: "SCHEDULER_ENABLED", Value: "true"}, +  ObservedGeneration: 0, -  ObservedGeneration: 1, -  ObservedGeneration: 2, -  ObservedGeneration: 3, -  ObservedGeneration: 4, -  "-on-change=/opt/percona/proxysql_add_proxysql_nodes.sh", +  "-on-change=/opt/percona/proxysql_add_proxysql_nodes.sh", -  Operation: "Update", -  Operation: "Update", -  "/opt/percona/peer-list", +  "/opt/percona/peer-list", -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJjMDBlODlkMzAxNzRkNmFhN2FkODM4OGQ3ZTIxYTI3YiIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNvbS9zc2wtaW50ZXJuYWwtaGFzaCI6Ijc1MWU1OWExYzFkOTRhYzY3ZjlkNmJiNmU3NTZhY2RjIn19LCJzcGVjIjp7InZvbHVtZXMiOlt7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImNvbmZpZyIsImNvbmZpZ01hcCI6eyJuYW1lIjoic29tZS1uYW1lLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJiaW4iLCJlbXB0eURpciI6e319XSwiaW5pdENvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTIzMzAtODI5M2YwNzEiLCJjb21tYW5kIjpbIi9weGMtaW5pdC1lbnRyeXBvaW50LnNoIl0sInJlc291cmNlcyI6eyJsaW1pdHMiOnsiY3B1IjoiNTBtIiwibWVtb3J5IjoiNTBNIn19LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJiaW4iLCJtb3VudFBhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHJveHlzcWwtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTIzMzAtODI5M2YwNzEiLCJjb21tYW5kIjpbIi9wcm94eXNxbC1pbml0LWVudHJ5cG9pbnQuc2giXSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6
eyJjcHUiOiI1MG0iLCJtZW1vcnkiOiI1ME0ifX0sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImJpbiIsIm1vdW50UGF0aCI6Ii9vcHQvcGVyY29uYSJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn1dLCJjb250YWluZXJzIjpbeyJuYW1lIjoicHJveHlzcWwiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXByb3h5c3FsIiwiY29tbWFuZCI6WyIvb3B0L3BlcmNvbmEvcHJveHlzcWwtZW50cnlwb2ludC5zaCJdLCJhcmdzIjpbInByb3h5c3FsIiwiLWYiLCItYyIsIi9ldGMvcHJveHlzcWwvcHJveHlzcWwuY25mIiwiLS1yZWxvYWQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InByb3h5YWRtIiwiY29udGFpbmVyUG9ydCI6NjAzMn0seyJuYW1lIjoic3RhdHMiLCJjb250YWluZXJQb3J0Ijo2MDcwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IlBYQ19TRVJWSUNFIiwidmFsdWUiOiJzb21lLW5hbWUtcHhjIn0seyJuYW1lIjoiT1BFUkFUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiUFJPWFlfQURNSU5fVVNFUiIsInZhbHVlIjoicHJveHlhZG1pbiJ9LHsibmFtZSI6IlBST1hZX0FETUlOX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoicHJveHlhZG1pbiJ9fX0seyJuYW1lIjoiTU9OSVRPUl9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6Im1vbml0b3IifX19LHsibmFtZSI6IlNDSEVEVUxFUl9DSEVDS1RJTUVPVVQiLCJ2YWx1ZSI6IjIwMDAifSx7Im5hbWUiOiJTQ0hFRFVMRVJfV1JJVEVSQUxTT1JFQURFUiIsInZhbHVlIjoiMSJ9LHsibmFtZSI6IlNDSEVEVUxFUl9SRVRSWVVQIiwidmFsdWUiOiIxIn0seyJuYW1lIjoiU0NIRURVTEVSX1JFVFJZRE9XTiIsInZhbHVlIjoiMyJ9LHsibmFtZSI6IlNDSEVEVUxFUl9QSU5HVElNRU9VVCIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9OT0RFQ0hFQ0tJTlRFUlZBTCIsInZhbHVlIjoiMjAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9NQVhDT05ORUNUSU9OUyIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlBFUkNPTkFfU0NIRURVTEVSX0NGRyIsInZhbHVlIjoiL3RtcC9zY2hlZHVsZXItY29uZmlnLnRvbWwifSx7Im5hbWUiOiJTQ0hFRFVMRVJfRU5BQkxFRCIsInZhbHVlIjoidHJ1ZSJ9XSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6eyJjcHUiOiI3MDBtIiwibWVtb3J5IjoiMUcifSwicmVxdWVzdHMiOnsiY3B1IjoiMTAwbSIsIm1lbW9y"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJjMDBlODlkMzAxNzRkNmFhN2FkODM4OGQ3ZTIxYTI3YiIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNvbS9zc2wtaW50ZXJuYWwtaGFzaCI6Ijc1MWU1OWExYzFkOTRhYzY3ZjlkNmJiNmU3NTZhY2RjIn19LCJzcGVjIjp7InZvbHVtZXMiOlt7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImNvbmZpZyIsImNvbmZpZ01hcCI6eyJuYW1lIjoic29tZS1uYW1lLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJiaW4iLCJlbXB0eURpciI6e319XSwiaW5pdENvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTIzMzAtODI5M2YwNzEiLCJjb21tYW5kIjpbIi9weGMtaW5pdC1lbnRyeXBvaW50LnNoIl0sInJlc291cmNlcyI6eyJsaW1pdHMiOnsiY3B1IjoiNTBtIiwibWVtb3J5IjoiNTBNIn19LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJiaW4iLCJtb3VudFBhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHJveHlzcWwtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTIzMzAtODI5M2YwNzEiLCJjb21tYW5kIjpbIi9wcm94eXNxbC1pbml0LWVudHJ5cG9pbnQuc2giXSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6eyJjcHUiOiI1MG0iLCJtZW1vcnkiOiI1ME0ifX0sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImJpbiIsIm1vdW50UGF0aCI6Ii9vcHQvcGVyY29uYSJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn1dLCJjb250YWluZXJzIjpbeyJuYW1lIjoicHJveHlzcWwiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXByb3h5c3FsIiwiY29tbWFuZCI6WyIvb3B0L3BlcmNvbmEvcHJveHlzcWwtZW50cnlwb2ludC5zaCJdLCJhcmdzIjpbInByb3h5c3FsIiwiLWYiLCItYyIsIi9ldGMvcHJveHlzcWwvcHJveHlzcWwuY25mIiwiLS1yZWxvYWQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InByb3h5YWRtIiwiY29udGFpbmVyUG9ydCI6NjAzMn0seyJuYW1lIjoic3RhdHMiLCJjb250YWluZXJQb3J0Ijo2MDcwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IlBYQ19TRVJWSUNFIiwidmFsdWUiOiJzb21lLW5hbWUtcHhjIn0seyJuYW1lIjoiT1BFUkFUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiUFJPWFlfQURNSU5fVVNFUiIsInZhbHVlIjoicHJveHlhZG1pbiJ9LHsibmFtZSI6IlBST1hZX0FETUlOX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoicHJveHlhZG1pbiJ9fX0seyJuYW1lIjoiTU9OSVRPUl9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6Im1vbml0b3IifX19LHsibmFtZSI6IlNDSEVEVUxFUl9DSEVDS1RJTUVPVVQiLCJ2YWx1ZSI6IjIwMDAifSx7Im5hbWUiOiJTQ0hFRFVMRVJfV1JJVEVSQUxTT1JFQURFUiIsInZhbHVlIjoiMSJ9LHsibmFtZSI6IlNDSEVEVUxFUl9SRVRSWVVQIi
widmFsdWUiOiIxIn0seyJuYW1lIjoiU0NIRURVTEVSX1JFVFJZRE9XTiIsInZhbHVlIjoiMyJ9LHsibmFtZSI6IlNDSEVEVUxFUl9QSU5HVElNRU9VVCIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9OT0RFQ0hFQ0tJTlRFUlZBTCIsInZhbHVlIjoiMjAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9NQVhDT05ORUNUSU9OUyIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlBFUkNPTkFfU0NIRURVTEVSX0NGRyIsInZhbHVlIjoiL3RtcC9zY2hlZHVsZXItY29uZmlnLnRvbWwifSx7Im5hbWUiOiJTQ0hFRFVMRVJfRU5BQkxFRCIsInZhbHVlIjoidHJ1ZSJ9XSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6eyJjcHUiOiI3MDBtIiwibWVtb3J5IjoiMUcifSwicmVxdWVzdHMiOnsiY3B1IjoiMTAwbSIsIm1lbW9y"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJjMDBlODlkMzAxNzRkNmFhN2FkODM4OGQ3ZTIxYTI3YiIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNvbS9zc2wtaW50ZXJuYWwtaGFzaCI6Ijc1MWU1OWExYzFkOTRhYzY3ZjlkNmJiNmU3NTZhY2RjIn19LCJzcGVjIjp7InZvbHVtZXMiOlt7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImNvbmZpZyIsImNvbmZpZ01hcCI6eyJuYW1lIjoic29tZS1uYW1lLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJiaW4iLCJlbXB0eURpciI6e319XSwiaW5pdENvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTIzMzAtODI5M2YwNzEiLCJjb21tYW5kIjpbIi9weGMtaW5pdC1lbnRyeXBvaW50LnNoIl0sInJlc291cmNlcyI6eyJsaW1pdHMiOnsiY3B1IjoiNTBtIiwibWVtb3J5IjoiNTBNIn19LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJiaW4iLCJtb3VudFBhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHJveHlzcWwtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTIzMzAtODI5M2YwNzEiLCJjb21tYW5kIjpbIi9wcm94eXNxbC1pbml0LWVudHJ5cG9pbnQuc2giXSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6eyJjcHUiOiI1MG0iLCJtZW1vcnkiOiI1ME0ifX0sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImJpbiIsIm1vdW50UGF0aCI6Ii9vcHQvcGVyY29uYSJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn1dLCJjb250YWluZXJzIjpbeyJuYW1lIjoicHJveHlzcWwiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXByb3h5c3FsIiwiY29tbWFuZCI6WyIvb3B0L3BlcmNvbmEvcHJveHlzcWwtZW50cnlwb2ludC5zaCJdLCJhcmdzIjpbInByb3h5c3FsIiwiLWYiLCItYyIsIi9ldGMvcHJveHlzcWwvcHJveHlzcWwuY25mIiwiLS1yZWxvYWQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InByb3h5YWRtIiwiY29udGFpbmVyUG9ydCI6NjAzMn0seyJuYW1lIjoic3RhdHMiLCJjb250YWluZXJQb3J0Ijo2MDcwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IlBYQ19TRVJWSUNFIiwidmFsdWUiOiJzb21lLW5hbWUtcHhjIn0seyJuYW1lIjoiT1BFUkFUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp
7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiUFJPWFlfQURNSU5fVVNFUiIsInZhbHVlIjoicHJveHlhZG1pbiJ9LHsibmFtZSI6IlBST1hZX0FETUlOX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoicHJveHlhZG1pbiJ9fX0seyJuYW1lIjoiTU9OSVRPUl9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6Im1vbml0b3IifX19LHsibmFtZSI6IlNDSEVEVUxFUl9DSEVDS1RJTUVPVVQiLCJ2YWx1ZSI6IjIwMDAifSx7Im5hbWUiOiJTQ0hFRFVMRVJfV1JJVEVSQUxTT1JFQURFUiIsInZhbHVlIjoiMSJ9LHsibmFtZSI6IlNDSEVEVUxFUl9SRVRSWVVQIiwidmFsdWUiOiIxIn0seyJuYW1lIjoiU0NIRURVTEVSX1JFVFJZRE9XTiIsInZhbHVlIjoiMyJ9LHsibmFtZSI6IlNDSEVEVUxFUl9QSU5HVElNRU9VVCIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9OT0RFQ0hFQ0tJTlRFUlZBTCIsInZhbHVlIjoiMjAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9NQVhDT05ORUNUSU9OUyIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlBFUkNPTkFfU0NIRURVTEVSX0NGRyIsInZhbHVlIjoiL3RtcC9zY2hlZHVsZXItY29uZmlnLnRvbWwifV0sInJlc291cmNlcyI6eyJsaW1pdHMiOnsiY3B1IjoiNzAwbSIsIm1lbW9yeSI6IjFHIn0sInJlcXVlc3RzIjp7ImNwdSI6IjEwMG0iLCJtZW1vcnkiOiIxMDBNIn19LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJwcm94eWRh"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJjMDBlODlkMzAxNzRkNmFhN2FkODM4OGQ3ZTIxYTI3YiIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNvbS9zc2wtaW50ZXJuYWwtaGFzaCI6Ijc1MWU1OWExYzFkOTRhYzY3ZjlkNmJiNmU3NTZhY2RjIn19LCJzcGVjIjp7InZvbHVtZXMiOlt7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImNvbmZpZyIsImNvbmZpZ01hcCI6eyJuYW1lIjoic29tZS1uYW1lLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJiaW4iLCJlbXB0eURpciI6e319XSwiaW5pdENvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTIzMzAtODI5M2YwNzEiLCJjb21tYW5kIjpbIi9weGMtaW5pdC1lbnRyeXBvaW50LnNoIl0sInJlc291cmNlcyI6eyJsaW1pdHMiOnsiY3B1IjoiNTBtIiwibWVtb3J5IjoiNTBNIn19LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJiaW4iLCJtb3VudFBhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHJveHlzcWwtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTIzMzAtODI5M2YwNzEiLCJjb21tYW5kIjpbIi9wcm94eXNxbC1pbml0LWVudHJ5cG9pbnQuc2giXSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6eyJjcHUiOiI1MG0iLCJtZW1vcnkiOiI1ME0ifX0sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImJpbiIsIm1vdW50UGF0aCI6Ii9vcHQvcGVyY29uYSJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn1dLCJjb250YWluZXJzIjpbeyJuYW1lIjoicHJveHlzcWwiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXByb3h5c3FsIiwi
Y29tbWFuZCI6WyIvb3B0L3BlcmNvbmEvcHJveHlzcWwtZW50cnlwb2ludC5zaCJdLCJhcmdzIjpbInByb3h5c3FsIiwiLWYiLCItYyIsIi9ldGMvcHJveHlzcWwvcHJveHlzcWwuY25mIiwiLS1yZWxvYWQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InByb3h5YWRtIiwiY29udGFpbmVyUG9ydCI6NjAzMn0seyJuYW1lIjoic3RhdHMiLCJjb250YWluZXJQb3J0Ijo2MDcwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IlBYQ19TRVJWSUNFIiwidmFsdWUiOiJzb21lLW5hbWUtcHhjIn0seyJuYW1lIjoiT1BFUkFUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiUFJPWFlfQURNSU5fVVNFUiIsInZhbHVlIjoicHJveHlhZG1pbiJ9LHsibmFtZSI6IlBST1hZX0FETUlOX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoicHJveHlhZG1pbiJ9fX0seyJuYW1lIjoiTU9OSVRPUl9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6Im1vbml0b3IifX19LHsibmFtZSI6IlNDSEVEVUxFUl9DSEVDS1RJTUVPVVQiLCJ2YWx1ZSI6IjIwMDAifSx7Im5hbWUiOiJTQ0hFRFVMRVJfV1JJVEVSQUxTT1JFQURFUiIsInZhbHVlIjoiMSJ9LHsibmFtZSI6IlNDSEVEVUxFUl9SRVRSWVVQIiwidmFsdWUiOiIxIn0seyJuYW1lIjoiU0NIRURVTEVSX1JFVFJZRE9XTiIsInZhbHVlIjoiMyJ9LHsibmFtZSI6IlNDSEVEVUxFUl9QSU5HVElNRU9VVCIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9OT0RFQ0hFQ0tJTlRFUlZBTCIsInZhbHVlIjoiMjAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9NQVhDT05ORUNUSU9OUyIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlBFUkNPTkFfU0NIRURVTEVSX0NGRyIsInZhbHVlIjoiL3RtcC9zY2hlZHVsZXItY29uZmlnLnRvbWwifV0sInJlc291cmNlcyI6eyJsaW1pdHMiOnsiY3B1IjoiNzAwbSIsIm1lbW9yeSI6IjFHIn0sInJlcXVlc3RzIjp7ImNwdSI6IjEwMG0iLCJtZW1vcnkiOiIxMDBNIn19LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJwcm94eWRh"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJl"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJl"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., +  "percona.com/last-config-hash": 
"eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJjMDBlODlkMzAxNzRkNmFhN2FkODM4OGQ3ZTIxYTI3YiIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNvbS9zc2wtaW50ZXJuYWwtaGFzaCI6Ijc1MWU1OWExYzFkOTRhYzY3ZjlkNmJiNmU3NTZhY2RjIn19LCJzcGVjIjp7InZvbHVtZXMiOlt7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImNvbmZpZyIsImNvbmZpZ01hcCI6eyJuYW1lIjoic29tZS1uYW1lLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJiaW4iLCJlbXB0eURpciI6e319XSwiaW5pdENvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTIzMzAtODI5M2YwNzEiLCJjb21tYW5kIjpbIi9weGMtaW5pdC1lbnRyeXBvaW50LnNoIl0sInJlc291cmNlcyI6eyJsaW1pdHMiOnsiY3B1IjoiNTBtIiwibWVtb3J5IjoiNTBNIn19LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJiaW4iLCJtb3VudFBhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHJveHlzcWwtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTIzMzAtODI5M2YwNzEiLCJjb21tYW5kIjpbIi9wcm94eXNxbC1pbml0LWVudHJ5cG9pbnQuc2giXSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6eyJjcHUiOiI1MG0iLCJtZW1vcnkiOiI1ME0ifX0sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImJpbiIsIm1vdW50UGF0aCI6Ii9vcHQvcGVyY29uYSJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn1dLCJjb250YWluZXJzIjpbeyJuYW1lIjoicHJveHlzcWwiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXByb3h5c3FsIiwiY29tbWFuZCI6WyIvb3B0L3BlcmNvbmEvcHJveHlzcWwtZW50cnlwb2ludC5zaCJdLCJhcmdzIjpbInByb3h5c3FsIiwiLWYiLCItYyIsIi9ldGMvcHJveHlzcWwvcHJveHlzcWwuY25mIiwiLS1yZWxvYWQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InByb3h5YWRtIiwiY29udGFpbmVyUG9ydCI6NjAzMn0seyJuYW1lIjoic3RhdHMiLCJjb250YWluZXJQb3J0Ijo2MDcwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IlBYQ19TRVJWSUNFIiwidmFsdWUiOiJzb21lLW5hbWUtcHhjIn0seyJuYW1lIjoiT1BFUkFUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiUFJPWFlfQURNSU5fVVNFUiIsInZhbHVlIjoicHJveHlhZG1pbiJ9LHsibmFtZSI6IlBST1hZX0FETUlOX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoicHJveHlhZG1pbiJ9fX0seyJuYW1lIjoiTU9OSVRPUl9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6Im1vbml0b3IifX19LHsibmFtZSI6IlNDSEVEVUxFUl9DSEVDS1RJTUVPVVQiLCJ2YWx1ZSI6IjIwMDAifSx7Im5hbWUiOiJTQ0hFRFVMRVJfV1JJVEVSQUxTT1JFQURFUiIsInZhbHVlIjoiMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9SRVRSWVVQIi
widmFsdWUiOiIxIn0seyJuYW1lIjoiU0NIRURVTEVSX1JFVFJZRE9XTiIsInZhbHVlIjoiMyJ9LHsibmFtZSI6IlNDSEVEVUxFUl9QSU5HVElNRU9VVCIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9OT0RFQ0hFQ0tJTlRFUlZBTCIsInZhbHVlIjoiMjAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9NQVhDT05ORUNUSU9OUyIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlBFUkNPTkFfU0NIRURVTEVSX0NGRyIsInZhbHVlIjoiL3RtcC9zY2hlZHVsZXItY29uZmlnLnRvbWwifSx7Im5hbWUiOiJTQ0hFRFVMRVJfRU5BQkxFRCIsInZhbHVlIjoidHJ1ZSJ9XSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6eyJjcHUiOiI3MDBtIiwibWVtb3J5IjoiMUcifSwicmVxdWVzdHMiOnsiY3B1IjoiMTAwbSIsIm1lbW9y"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBwLmt1YmVybmV0ZXMuaW8vbmFtZSI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIiLCJhcHAua3ViZXJuZXRlcy5pby9wYXJ0LW9mIjoicGVyY29uYS14dHJhZGItY2x1c3RlciJ9fSwidGVtcGxhdGUiOnsibWV0YWRhdGEiOnsibGFiZWxzIjp7ImFwcC5rdWJlcm5ldGVzLmlvL2NvbXBvbmVudCI6InByb3h5c3FsIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJlcm5ldGVzLmlvL25hbWUiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyIiwiYXBwLmt1YmVybmV0ZXMuaW8vcGFydC1vZiI6InBlcmNvbmEteHRyYWRiLWNsdXN0ZXIifSwiYW5ub3RhdGlvbnMiOnsia3ViZWN0bC5rdWJlcm5ldGVzLmlvL2RlZmF1bHQtY29udGFpbmVyIjoicHJveHlzcWwiLCJwZXJjb25hLmNvbS9jb25maWd1cmF0aW9uLWhhc2giOiJjMDBlODlkMzAxNzRkNmFhN2FkODM4OGQ3ZTIxYTI3YiIsInBlcmNvbmEuY29tL3NzbC1oYXNoIjoiY2ZkNmE1MjM5ODI2ODE3M2I1MWQ3Y2RiMTMzMWMwOWEiLCJwZXJjb25hLmNvbS9zc2wtaW50ZXJuYWwtaGFzaCI6Ijc1MWU1OWExYzFkOTRhYzY3ZjlkNmJiNmU3NTZhY2RjIn19LCJzcGVjIjp7InZvbHVtZXMiOlt7Im5hbWUiOiJzc2wtaW50ZXJuYWwiLCJzZWNyZXQiOnsic2VjcmV0TmFtZSI6InNvbWUtbmFtZS1zc2wtaW50ZXJuYWwiLCJvcHRpb25hbCI6dHJ1ZX19LHsibmFtZSI6InNzbCIsInNlY3JldCI6eyJzZWNyZXROYW1lIjoic29tZS1uYW1lLXNzbCIsIm9wdGlvbmFsIjpmYWxzZX19LHsibmFtZSI6ImNvbmZpZyIsImNvbmZpZ01hcCI6eyJuYW1lIjoic29tZS1uYW1lLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fSx7Im5hbWUiOiJiaW4iLCJlbXB0eURpciI6e319XSwiaW5pdENvbnRhaW5lcnMiOlt7Im5hbWUiOiJweGMtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTIzMzAtODI5M2YwNzEiLCJjb21tYW5kIjpbIi9weGMtaW5pdC1lbnRyeXBvaW50LnNoIl0sInJlc291cmNlcyI6eyJsaW1pdHMiOnsiY3B1IjoiNTBtIiwibWVtb3J5IjoiNTBNIn19LCJ2b2x1bWVNb3VudHMiOlt7Im5hbWUiOiJiaW4iLCJtb3VudFBhdGgiOiIvdmFyL2xpYi9teXNxbCJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn0seyJuYW1lIjoicHJveHlzcWwtaW5pdCIsImltYWdlIjoicGVyY29uYWxhYi9wZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yOlBSLTIzMzAtODI5M2YwNzEiLCJjb21tYW5kIjpbIi9wcm94eXNxbC1pbml0LWVudHJ5cG9pbnQuc2giXSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6eyJjcHUiOiI1MG0iLCJtZW1vcnkiOiI1ME0ifX0sInZvbHVtZU1vdW50cyI6W3sibmFtZSI6ImJpbiIsIm1vdW50UGF0aCI6Ii9vcHQvcGVyY29uYSJ9XSwiaW1hZ2VQdWxsUG9saWN5IjoiQWx3YXlzIn1dLCJjb250YWluZXJzIjpbeyJuYW1lIjoicHJveHlzcWwiLCJpbWFnZSI6InBlcmNvbmFsYWIvcGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvcjptYWluLXByb3h5c3FsIiwiY29tbWFuZCI6WyIvb3B0L3BlcmNvbmEvcHJveHlzcWwtZW50cnlwb2ludC5zaCJdLCJhcmdzIjpbInByb3h5c3FsIiwiLWYiLCItYyIsIi9ldGMvcHJveHlzcWwvcHJveHlzcWwuY25mIiwiLS1yZWxvYWQiXSwicG9ydHMiOlt7Im5hbWUiOiJteXNxbCIsImNvbnRhaW5lclBvcnQiOjMzMDZ9LHsibmFtZSI6InByb3h5YWRtIiwiY29udGFpbmVyUG9ydCI6NjAzMn0seyJuYW1lIjoic3RhdHMiLCJjb250YWluZXJQb3J0Ijo2MDcwfV0sImVudkZyb20iOlt7InNlY3JldFJlZiI6eyJuYW1lIjoic29tZS1uYW1lLWVudi12YXJzLXByb3h5c3FsIiwib3B0aW9uYWwiOnRydWV9fV0sImVudiI6W3sibmFtZSI6IlBYQ19TRVJWSUNFIiwidmFsdWUiOiJzb21lLW5hbWUtcHhjIn0seyJuYW1lIjoiT1BFUkFUT1JfUEFTU1dPUkQiLCJ2YWx1ZUZyb20iOnsic2VjcmV0S2V5UmVmIjp
7Im5hbWUiOiJpbnRlcm5hbC1zb21lLW5hbWUiLCJrZXkiOiJvcGVyYXRvciJ9fX0seyJuYW1lIjoiUFJPWFlfQURNSU5fVVNFUiIsInZhbHVlIjoicHJveHlhZG1pbiJ9LHsibmFtZSI6IlBST1hZX0FETUlOX1BBU1NXT1JEIiwidmFsdWVGcm9tIjp7InNlY3JldEtleVJlZiI6eyJuYW1lIjoiaW50ZXJuYWwtc29tZS1uYW1lIiwia2V5IjoicHJveHlhZG1pbiJ9fX0seyJuYW1lIjoiTU9OSVRPUl9QQVNTV09SRCIsInZhbHVlRnJvbSI6eyJzZWNyZXRLZXlSZWYiOnsibmFtZSI6ImludGVybmFsLXNvbWUtbmFtZSIsImtleSI6Im1vbml0b3IifX19LHsibmFtZSI6IlNDSEVEVUxFUl9DSEVDS1RJTUVPVVQiLCJ2YWx1ZSI6IjIwMDAifSx7Im5hbWUiOiJTQ0hFRFVMRVJfV1JJVEVSQUxTT1JFQURFUiIsInZhbHVlIjoiMSJ9LHsibmFtZSI6IlNDSEVEVUxFUl9SRVRSWVVQIiwidmFsdWUiOiIxIn0seyJuYW1lIjoiU0NIRURVTEVSX1JFVFJZRE9XTiIsInZhbHVlIjoiMyJ9LHsibmFtZSI6IlNDSEVEVUxFUl9QSU5HVElNRU9VVCIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9OT0RFQ0hFQ0tJTlRFUlZBTCIsInZhbHVlIjoiMjAwMCJ9LHsibmFtZSI6IlNDSEVEVUxFUl9NQVhDT05ORUNUSU9OUyIsInZhbHVlIjoiMTAwMCJ9LHsibmFtZSI6IlBFUkNPTkFfU0NIRURVTEVSX0NGRyIsInZhbHVlIjoiL3RtcC9zY2hlZHVsZXItY29uZmlnLnRvbWwifSx7Im5hbWUiOiJTQ0hFRFVMRVJfRU5BQkxFRCIsInZhbHVlIjoidHJ1ZSJ9XSwicmVzb3VyY2VzIjp7ImxpbWl0cyI6eyJjcHUiOiI3MDBtIiwibWVtb3J5IjoiMUcifSwicmVxdWVzdHMiOnsiY3B1IjoiMTAwbSIsIm1lbW9y"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6NSwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJl"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6NSwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJl"..., +  PeriodSeconds: 0, -  PeriodSeconds: 10, +  PersistentVolumeClaimRetentionPolicy: nil, -  PersistentVolumeClaimRetentionPolicy: s"&StatefulSetPersistentVolumeClaimRetentionPolicy{WhenDeleted:Retain,WhenScaled:Retain,}", +  Phase: "", -  Phase: "Pending", +  PodManagementPolicy: "", -  PodManagementPolicy: "OrderedReady", +  Protocol: "", -  Protocol: "TCP", +  ReadyReplicas: 0, -  ReadyReplicas: 2, -  ReadyReplicas: 3, -  ReadyReplicas: 5, +  Replicas: 0, -  Replicas: 2, -  Replicas: &2, -  Replicas: 3, -  Replicas: &3, +  Replicas: &3, -  Replicas: 5, -  Replicas: &5, +  Replicas: &5, +  ResourceVersion: "", -  ResourceVersion: "1767020059868335023", -  ResourceVersion: "1767020515535423023", -  ResourceVersion: "1767020680814207023", -  ResourceVersion: "1767020792407615024", -  ResourceVersion: "1767020951241359024", -  ResourceVersion: "1767021101466207023", +  RestartPolicy: "", -  RestartPolicy: "Always", -  RevisionHistoryLimit: &10, +  RevisionHistoryLimit: nil, +  SchedulerName: "", -  SchedulerName: "default-scheduler", -  "-service=$(PROXYSQL_SERVICE)", "-protocol=$(PEER_LIST_SRV_PROTOCOL)", +  "-service=$(PROXYSQL_SERVICE)", "-protocol=$(PEER_LIST_SRV_PROTOCOL)", -  Subresource: "status", +  TerminationMessagePath: "", -  TerminationMessagePath: "/dev/termination-log", +  TerminationMessagePolicy: "", -  TerminationMessagePolicy: "File", -  Time: s"2025-12-29 14:53:39 +0000 UTC", -  Time: s"2025-12-29 14:54:19 +0000 UTC", -  Time: s"2025-12-29 15:01:21 +0000 UTC", -  Time: s"2025-12-29 15:01:55 +0000 UTC", -  Time: s"2025-12-29 15:03:57 +0000 UTC", -  Time: s"2025-12-29 15:04:40 +0000 UTC", -  Time: s"2025-12-29 15:06:32 +0000 UTC", -  Time: s"2025-12-29 15:06:46 +0000 UTC", -  Time: s"2025-12-29 15:09:11 +0000 UTC", -  Time: s"2025-12-29 15:11:21 +0000 UTC", -  Time: s"2025-12-29 
15:11:41 +0000 UTC", -  TopologySpreadConstraints: nil, +  TopologySpreadConstraints: []v1.TopologySpreadConstraint{}, +  UID: "", -  UID: "3823e520-aee2-4561-8a8c-6299dbaaf2d4", -  UID: "ca903740-c64d-4a7f-8e02-834b17f01032", +  UpdatedReplicas: 0, -  UpdatedReplicas: 2, -  UpdatedReplicas: 3, -  UpdatedReplicas: 5, +  UpdateRevision: "", -  UpdateRevision: "some-name-proxysql-669d589b5b", -  UpdateRevision: "some-name-proxysql-75c5fd5d74", -  UpdateRevision: "some-name-pxc-7f48d99997", +  Value: "0", -  Value: "1", -  VolumeMode: &"Filesystem", +  VolumeMode: nil, -  VolumeMounts: []v1.VolumeMount{{Name: "bin", MountPath: "/opt/percona"}}, +  VolumeMounts: []v1.VolumeMount{{Name: "bin", MountPath: "/opt/percona"}},   }    },    },    {    },    },    {    },    },    {    },    ... // 11 identical elements    ... // 16 identical fields    ... // 16 identical fields    ... // 22 identical fields    ... // 2 identical fields    ... // 3 identical fields    ... // 3 identical fields    ... // 3 identical fields    ... // 4 identical elements    ... // 4 identical fields    ... // 5 identical elements    ... // 5 identical fields    ... // 5 identical fields    ... // 6 identical fields    ... // 6 identical fields    ... // 7 identical fields    ... // 8 identical fields    ... // 9 identical fields    AccessModes: nil,    ActiveDeadlineSeconds: nil,    Affinity: nil,    Args: {"mysqld"},    Args: {"proxysql", "-f", "-c", "/etc/proxysql/proxysql.cnf", ...},    AutomountServiceAccountToken: nil,    AWSElasticBlockStore: nil,    AzureFile: nil,    Capacity: nil,    Conditions: nil,    ConfigMap: &v1.ConfigMapVolumeSource{    ContainerPort: 3306,    ContainerPort: 33060,    ContainerPort: 33062,    ContainerPort: 4444,    ContainerPort: 4567,    ContainerPort: 4568,    ContainerPort: 6032,    ContainerPort: 6070,    Containers: []v1.Container{    DataSource: nil,    DataSourceRef: nil,    DeletionGracePeriodSeconds: nil,    DeletionTimestamp: nil,    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-proxysql"}, Optional: &true}}},    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-pxc"}, Optional: &true}}},    Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc"}, {Name: "OPERATOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}}, {Name: "PROXY_ADMIN_USER", Value: "proxyadmin"}, {Name: "PROXY_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "proxyadmin"}}}, ...},    Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc-unready"}, {Name: "MONITOR_HOST", Value: "%"}, {Name: "MYSQL_ROOT_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "root"}}}, {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}}, ...},    Env: []v1.EnvVar{    EphemeralContainers: nil,    FailureThreshold: 3,    FC: nil,    Finalizers: nil,    GitRepo: nil,    HostAliases: nil,    HostIP: "",    HostPort: 0,    ImagePullPolicy: "Always",    InitContainers: []v1.Container{    InitialDelaySeconds: 300,    ISCSI: nil,    Items: nil,    Items: nil,    Labels: nil,    Lifecycle: nil,    LivenessProbe: &v1.Probe{    LocalObjectReference: {Name: "auto-some-name-pxc"},    LocalObjectReference: {Name: "some-name-proxysql"},    LocalObjectReference: {Name: "some-name-pxc"},    MinReadySeconds: 0,    Name: "auto-config",    {Name: "bin", VolumeSource: 
{EmptyDir: &{}}},    Name: "config",    Name: "ist",    {Name: "MONITOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "monitor"}}},    Name: "mysql",    Name: "mysql-admin",    Name: "mysql-init-file",    Name: "mysql-users-secret-file",    Name: "mysqlx",    {Name: "PERCONA_SCHEDULER_CFG", Value: "/tmp/scheduler-config.toml"},    Name: "proxyadm",    {Name: "SCHEDULER_CHECKTIMEOUT", Value: "2000"},    {Name: "SCHEDULER_MAXCONNECTIONS", Value: "1000"},    {Name: "SCHEDULER_RETRYDOWN", Value: "3"},    {Name: "SCHEDULER_RETRYUP", Value: "1"},    Name: "SCHEDULER_WRITERALSOREADER",    Namespace: "proxysql-scheduler-7615",    Name: "ssl",    Name: "ssl-internal",    Name: "sst",    Name: "stats",    {Name: "tmp", VolumeSource: {EmptyDir: &{}}},    Name: "vault-keyring-secret",    Name: "write-set",    NFS: nil,    NodeName: "",    NodeSelector: nil,    ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "percona.com/configuration-hash": "c00e89d30174d6aa7ad8388d7e21a27b", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc"}},    ObjectMeta: {Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "pxc", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc"}},    ObjectMeta: {Name: "datadir", Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    ObjectMeta: {Name: "proxydata", Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    ObjectMeta: v1.ObjectMeta{    Optional: &false,    Optional: &true,    Optional: &true,    Ordinals: nil,    OS: nil,    Overhead: nil,    OwnerReferences: {{APIVersion: "pxc.percona.com/v1", Kind: "PerconaXtraDBCluster", Name: "some-name", UID: "51f7750d-6014-44d5-b14c-91710e60d898", ...}},    Ports: nil,    Ports: []v1.ContainerPort{    PreemptionPolicy: nil,    ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/liveness-check.sh"}}},    Quobyte: nil,    ReadinessProbe: &{ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/readiness-check.sh"}}}, InitialDelaySeconds: 15, TimeoutSeconds: 15, PeriodSeconds: 30, ...},    Replicas: &2,    Replicas: &3,    ResizePolicy: nil,    Resources: {},    Resources: {Limits: {s"cpu": {i: {...}, s: "700m", Format: "DecimalSI"}, s"memory": {i: {...}, s: "1G", Format: "DecimalSI"}}, Requests: {s"cpu": {i: {...}, s: "100m", Format: "DecimalSI"}, s"memory": {i: {...}, Format: "DecimalSI"}}},    SecretName: "internal-some-name",    SecretName: "some-name-mysql-init",    SecretName: "some-name-ssl",    SecretName: "some-name-ssl-internal",    SecretName: "some-name-vault",    Secret: 
&v1.SecretVolumeSource{    SecurityContext: nil,    Selector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    Selector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    SelfLink: "",    ServiceAccountName: "default",    ServiceName: "some-name-proxysql-unready",    ServiceName: "some-name-pxc",    SetHostnameAsFQDN: nil,    Spec: v1.PersistentVolumeClaimSpec{    Spec: v1.PodSpec{    Spec: v1.StatefulSetSpec{    StartupProbe: nil,    Status: v1.PersistentVolumeClaimStatus{    Status: v1.StatefulSetStatus{    StorageClassName: nil,    Subdomain: "",    SuccessThreshold: 1,    Template: v1.PodTemplateSpec{    TerminationGracePeriodSeconds: &30,    TerminationGracePeriodSeconds: &600,    TerminationGracePeriodSeconds: nil,    TimeoutSeconds: 5,    Tolerations: nil,    TypeMeta: {},    TypeMeta: {Kind: "StatefulSet", APIVersion: "apps/v1"},    UpdateStrategy: {Type: "RollingUpdate", RollingUpdate: &{Partition: &0}},   &v1.StatefulSet{    ValueFrom: nil,    VolumeAttributesClassName: nil,    VolumeClaimTemplates: []v1.PersistentVolumeClaim{    VolumeDevices: nil,    VolumeMounts: {{Name: "datadir", MountPath: "/var/lib/mysql"}, {Name: "config", MountPath: "/etc/percona-xtradb-cluster.conf.d"}, {Name: "tmp", MountPath: "/tmp"}, {Name: "ssl", MountPath: "/etc/mysql/ssl"}, ...},    VolumeName: "",    VolumeSource: v1.VolumeSource{    Volumes: []v1.Volume{    VsphereVolume: nil,    WorkingDir: "", + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + kubectl patch pxc -n proxysql-scheduler-7615 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.vRmUTlOE8q ++ mktemp + local LAST_ERR=/tmp/tmp.q9ILFrNfoJ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.vRmUTlOE8q perconaxtradbcluster.pxc.percona.com "some-name" deleted from proxysql-scheduler-7615 namespace + cat /tmp/tmp.q9ILFrNfoJ + rm /tmp/tmp.vRmUTlOE8q /tmp/tmp.q9ILFrNfoJ + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.aGm01HkAhf ++ mktemp + local LAST_ERR=/tmp/tmp.ZGCcTgB1mI + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.aGm01HkAhf No resources found + cat /tmp/tmp.ZGCcTgB1mI + rm /tmp/tmp.aGm01HkAhf /tmp/tmp.ZGCcTgB1mI + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.9XoxfXbO9w ++ mktemp + local LAST_ERR=/tmp/tmp.ScSXQ1msvg + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.9XoxfXbO9w No resources found + cat /tmp/tmp.ScSXQ1msvg + rm /tmp/tmp.9XoxfXbO9w /tmp/tmp.ScSXQ1msvg + return 0 + kubectl_bin delete ValidatingWebhookConfiguration 
percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.BS0BrniHyA ++ mktemp + local LAST_ERR=/tmp/tmp.Dw1uSfyScW + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BS0BrniHyA validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.Dw1uSfyScW + rm /tmp/tmp.BS0BrniHyA /tmp/tmp.Dw1uSfyScW + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace proxysql-scheduler-7615 + rm -rf /tmp/tmp.qG7R1JVFpB + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.zC4eOlLagK ++ mktemp + local LAST_OUT=/tmp/tmp.DcQh2agnKo ++ mktemp + local LAST_ERR=/tmp/tmp.mp7oZVbyKt + local exit_status=0 + local LAST_ERR=/tmp/tmp.TY22uUQMrl + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace proxysql-scheduler-7615 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator
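For reference, the cleanup that the harness performs here (interleaved with its mktemp/retry bookkeeping) boils down to the following plain sequence; this is a minimal sketch assuming the same kubectl context, with the resource and namespace names copied from this run:

  # drop finalizers so the custom resources can be deleted without waiting on the operator
  kubectl get pxc --all-namespaces -o wide \
    | grep -v NAMESPACE \
    | xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
  # delete the custom resources
  kubectl delete pxc --all --all-namespaces
  kubectl delete pxc-backup --all --all-namespaces
  kubectl delete pxc-restore --all --all-namespaces
  # remove the admission webhook and cert-manager, then force-remove the namespaces
  kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
  kubectl delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml
  kubectl delete --grace-period=0 --force=true namespace proxysql-scheduler-7615
  kubectl delete --grace-period=0 --force=true namespace pxc-operator

Each command mirrors one of the kubectl invocations traced above; the harness additionally retries each call up to three times and captures stdout/stderr to temp files before printing them.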
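Note: the "percona.com/last-config-hash" values in the StatefulSet diff above appear to be base64-encoded JSON snapshots of the StatefulSet spec the operator last applied (the visible prefix decodes to {"replicas":2,"selector":{"matchLabels":...}). Decoding the old and new values and diffing them shows which fields drove the "Updating object ... hashChanged: true" messages; in this run the changes look scheduler-related (SCHEDULER_ENABLED / SCHEDULER_WRITERALSOREADER). A minimal inspection sketch, assuming kubectl access while the test objects still exist and that jq is installed; the namespace and StatefulSet name are taken from this run:

  # decode the operator's last-applied config snapshot for the proxysql StatefulSet
  ns=proxysql-scheduler-7615        # test namespace from this run
  sts=some-name-proxysql            # StatefulSet named in the diff above
  kubectl -n "$ns" get sts "$sts" -o json \
    | jq -r '.metadata.annotations["percona.com/last-config-hash"]' \
    | base64 -d \
    | jq . > /tmp/last-config-hash.json   # decoded JSON spec snapshot

Running the same pipeline before and after a spec change and diffing the two files is a quick way to confirm what triggered a StatefulSet update.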