Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/logs/proxysql-scheduler-8-0.log
Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1
+ cluster=some-name
+ create_infra proxysql-scheduler-17053
+ local ns=proxysql-scheduler-17053
+ '[' -n pxc-operator ']'
+ kubectl get pxc --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ kubectl patch pxc -n proxysql-scheduler-4261 some-name --type=merge -p '{"metadata":{"finalizers":[]}}'
perconaxtradbcluster.pxc.percona.com/some-name patched
+ kubectl_bin delete pxc --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.fKIEZsncWL
++ mktemp
+ local LAST_ERR=/tmp/tmp.mkrSXuWEYy
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.fKIEZsncWL
perconaxtradbcluster.pxc.percona.com "some-name" deleted from proxysql-scheduler-4261 namespace
+ cat /tmp/tmp.mkrSXuWEYy
+ rm /tmp/tmp.fKIEZsncWL /tmp/tmp.mkrSXuWEYy
+ return 0
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.hBiAMltnni
++ mktemp
+ local LAST_ERR=/tmp/tmp.vefYMNoZcM
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.hBiAMltnni
No resources found
+ cat /tmp/tmp.vefYMNoZcM
+ rm /tmp/tmp.hBiAMltnni /tmp/tmp.vefYMNoZcM
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.8M8Trh3bXX
++ mktemp
+ local LAST_ERR=/tmp/tmp.KyQMQ7efWY
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.8M8Trh3bXX
No resources found
+ cat /tmp/tmp.KyQMQ7efWY
+ rm /tmp/tmp.8M8Trh3bXX /tmp/tmp.KyQMQ7efWY
+ return 0
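The kubectl_bin pattern traced above — two mktemp files for stdout/stderr, up to three attempts (seq 0 2), break on success — suggests a retry wrapper roughly like the sketch below. This is a reconstruction from the trace, not the verbatim helper from the test framework; the exact retry/sleep conditions are assumptions.

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status i
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        exit_status=0
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ $exit_status != 0 ]; then
                sleep 0    # the trace shows 'sleep 0' between failed attempts
            else
                break
            fi
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR" >&2
        rm "$LAST_OUT" "$LAST_ERR"
        return $exit_status
    }

Note also the finalizer handling just before it: the `kubectl patch ... -p '{"metadata":{"finalizers":[]}}'` fed through xargs clears finalizers on every PXC custom resource so the subsequent `delete pxc --all --all-namespaces` cannot hang on a blocked finalizer.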
+ create_namespace pxc-operator
+ local namespace=pxc-operator
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ tail -n1
++ helm list --all-namespaces --filter chaos-mesh
++ awk '-F ' '{print $2}'
++ sed s/NAMESPACE//
+ local chaos_mesh_ns=
+ '[' -n '' ']'
++ kubectl get MutatingWebhookConfiguration
++ awk '{print $1}'
++ grep chaos-mesh
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ awk '{print $1}'
++ grep chaos-mesh
++ kubectl get ValidatingWebhookConfiguration
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ grep validate-auth
++ awk '{print $1}'
++ kubectl get ValidatingWebhookConfiguration
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl api-resources
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get crd
++ grep chaos-mesh.org
++ awk '{print $1}'
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrolebinding
++ awk '{print $1}'
++ grep chaos-mesh
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrole
++ awk '{print $1}'
++ grep chaos-mesh
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
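destroy_chaos_mesh lists leftover chaos-mesh objects with grep/awk and passes the (here empty) name list straight to kubectl delete; each resulting "no name was specified" error is deliberately swallowed by the `+ :` no-op. A sketch of one such step, reconstructed from the trace; the `xargs -r` variant (GNU xargs) is shown only as an illustration of how the empty-list error could be avoided, it is not what the trace uses:

    # Trace form, reconstructed:
    #   timeout 30 kubectl delete clusterrole $(kubectl get clusterrole | awk '{print $1}' | grep chaos-mesh) || :
    # With no matches the delete fails harmlessly. 'xargs -r' skips the call entirely instead:
    kubectl get clusterrole | awk '{print $1}' | grep chaos-mesh \
        | xargs -r timeout 30 kubectl delete clusterrole || :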
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
+ kubectl_bin get ns
+ grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME'
+ '[' -n '' ']'
+ desc 'cleaned up old namespaces pxc-operator'
+ awk '{print$1}'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up old namespaces pxc-operator
-----------------------------------------------------------------------------------
+ kubectl_bin delete namespace pxc-operator
+ xargs kubectl delete ns
++ mktemp
++ mktemp
+ local LAST_OUT=/tmp/tmp.jS9sAV26J1
++ mktemp
+ local LAST_ERR=/tmp/tmp.5hgjXs1N3y
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace pxc-operator
+ local LAST_OUT=/tmp/tmp.hXKXxdeHMC
++ mktemp
+ local LAST_ERR=/tmp/tmp.EcxpbyIPht
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl get ns
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.hXKXxdeHMC
+ cat /tmp/tmp.EcxpbyIPht
+ rm /tmp/tmp.hXKXxdeHMC /tmp/tmp.EcxpbyIPht
+ return 0
namespace "proxysql-scheduler-4261" deleted
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.jS9sAV26J1
namespace "pxc-operator" deleted
+ cat /tmp/tmp.5hgjXs1N3y
+ rm /tmp/tmp.jS9sAV26J1 /tmp/tmp.5hgjXs1N3y
+ return 0
+ wait_for_delete namespace/pxc-operator
+ local res=namespace/pxc-operator
+ echo -n 'waiting for namespace/pxc-operator to be deleted'
waiting for namespace/pxc-operator to be deleted+ set +o xtrace
Error from server (NotFound): namespaces "pxc-operator" not found
+ desc 'create namespace pxc-operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace pxc-operator
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.2KcKzWN5tK
++ mktemp
+ local LAST_ERR=/tmp/tmp.ZkbOoqZAdT
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl create namespace pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.2KcKzWN5tK
namespace/pxc-operator created
+ cat /tmp/tmp.ZkbOoqZAdT
+ rm /tmp/tmp.2KcKzWN5tK /tmp/tmp.ZkbOoqZAdT
+ return 0
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.nWgji1fBrN
+++ mktemp
++ local LAST_ERR=/tmp/tmp.duEI8Xo53o
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.nWgji1fBrN
++ cat /tmp/tmp.duEI8Xo53o
++ rm /tmp/tmp.nWgji1fBrN /tmp/tmp.duEI8Xo53o
++ return 0
+ kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2290-96398578-2-cluster7 --namespace=pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.UmA3Z99gyv
++ mktemp
+ local LAST_ERR=/tmp/tmp.zMKiZKQJty
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2290-96398578-2-cluster7 --namespace=pxc-operator
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.UmA3Z99gyv
Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2290-96398578-2-cluster7" modified.
+ cat /tmp/tmp.zMKiZKQJty
+ rm /tmp/tmp.UmA3Z99gyv /tmp/tmp.zMKiZKQJty
+ return 0
+ deploy_operator
+ desc 'start PXC operator'
+ set +o xtrace
-----------------------------------------------------------------------------------
start PXC operator
-----------------------------------------------------------------------------------
+ kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/deploy/crd.yaml
++ mktemp
+ local LAST_OUT=/tmp/tmp.1v2swcyXWB
++ mktemp
+ local LAST_ERR=/tmp/tmp.N9hakMM6h9
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/deploy/crd.yaml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.1v2swcyXWB
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied
customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied
+ cat /tmp/tmp.N9hakMM6h9
+ rm /tmp/tmp.1v2swcyXWB /tmp/tmp.N9hakMM6h9
+ return 0
+ '[' -n pxc-operator ']'
+ apply_rbac cw-rbac
+ local operator_namespace=pxc-operator
+ local rbac=cw-rbac
+ sed -e 's^namespace: .*^namespace: pxc-operator^'
+ kubectl_bin apply -f -
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/deploy/cw-rbac.yaml
++ mktemp
+ local LAST_OUT=/tmp/tmp.m3iuzA3ChP
++ mktemp
+ local LAST_ERR=/tmp/tmp.UaOl8ni47O
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.m3iuzA3ChP
clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged
serviceaccount/percona-xtradb-cluster-operator created
clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged
+ cat /tmp/tmp.UaOl8ni47O
+ rm /tmp/tmp.m3iuzA3ChP /tmp/tmp.UaOl8ni47O
+ return 0
+ yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' -
+ sed -e 's^failureThreshold: .*^failureThreshold: 10^'
+ kubectl_bin apply -f -
+ yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' -
+ sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2290-96398578^'
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/deploy/cw-operator.yaml
++ mktemp
+ local LAST_OUT=/tmp/tmp.yNghPfO7H6
++ mktemp
+ local LAST_ERR=/tmp/tmp.0B4n3pCsOV
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.yNghPfO7H6
deployment.apps/percona-xtradb-cluster-operator created
service/percona-xtradb-cluster-operator created
+ cat /tmp/tmp.0B4n3pCsOV
+ rm /tmp/tmp.yNghPfO7H6 /tmp/tmp.0B4n3pCsOV
+ return 0
+ sleep 10
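Before that apply, cw-operator.yaml is rewritten in-stream: yq sets the LOG_LEVEL and DISABLE_TELEMETRY env values and sed swaps in the PR image and a looser failureThreshold. A minimal sketch of the pipeline, assembled from the traced commands; DEPLOY_DIR is an assumed shorthand for the deploy/ path, not a variable from the script:

    DEPLOY_DIR=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/deploy   # assumed shorthand
    cat "$DEPLOY_DIR/cw-operator.yaml" \
        | yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - \
        | yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - \
        | sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2290-96398578^' \
        | sed -e 's^failureThreshold: .*^failureThreshold: 10^' \
        | kubectl apply -f -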
+ kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s
++ mktemp
+ local LAST_OUT=/tmp/tmp.q4RE9lb15e
++ mktemp
+ local LAST_ERR=/tmp/tmp.IXIW03EUxi
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.q4RE9lb15e
pod/percona-xtradb-cluster-operator-7b6f7d984c-ds97d condition met
+ cat /tmp/tmp.IXIW03EUxi
+ rm /tmp/tmp.q4RE9lb15e /tmp/tmp.IXIW03EUxi
+ return 0
++ get_operator_pod
++ local label_prefix=app.kubernetes.io/
+++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator
+++ grep -c percona-xtradb-cluster-operator
++ local check_label=1
++ [[ 1 -eq 0 ]]
++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator
+++ mktemp
++ local LAST_OUT=/tmp/tmp.N0yZ4ahkSG
+++ mktemp
++ local LAST_ERR=/tmp/tmp.VnjigKpU9U
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.N0yZ4ahkSG
++ cat /tmp/tmp.VnjigKpU9U
++ rm /tmp/tmp.N0yZ4ahkSG /tmp/tmp.VnjigKpU9U
++ return 0
+ wait_pod percona-xtradb-cluster-operator-7b6f7d984c-ds97d 480 pxc-operator
+ local pod=percona-xtradb-cluster-operator-7b6f7d984c-ds97d
+ local max_retry=480
+ local ns=pxc-operator
++ echo percona-xtradb-cluster-operator-7b6f7d984c-ds97d
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/percona-xtradb-cluster-operator-7b6f7d984c-ds97d condition met
waiting for pod/percona-xtradb-cluster-operator-7b6f7d984c-ds97d to become Ready.Ok
+ sleep 3
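wait_pod derives the container to watch from the pod name alone: the sed/grep pair traced above maps some-name-pxc-0 to pxc and some-name-proxysql-0 to proxysql, and yields an empty string for anything else, such as the operator pod here. A sketch of that detection (the `|| true` and the final comment describe assumed behavior of the surrounding helper):

    pod=percona-xtradb-cluster-operator-7b6f7d984c-ds97d
    container=$(echo "$pod" | /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' | grep -E '^(pxc|proxysql)$' || true)
    # container is now "" for the operator pod, "pxc" for some-name-pxc-0, "proxysql" for
    # some-name-proxysql-0; presumably the helper passes ${container:+-c $container} downstream.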
+ create_namespace proxysql-scheduler-17053
+ local namespace=proxysql-scheduler-17053
+ local skip_clean_namespace=
+ [[ 1 == 1 ]]
+ [[ -z '' ]]
+ destroy_chaos_mesh
++ helm list --all-namespaces --filter chaos-mesh
++ awk '-F ' '{print $2}'
++ sed s/NAMESPACE//
++ tail -n1
+ local chaos_mesh_ns=
+ '[' -n '' ']'
++ kubectl get MutatingWebhookConfiguration
++ grep chaos-mesh
++ awk '{print $1}'
+ timeout 30 kubectl delete MutatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ grep chaos-mesh
++ awk '{print $1}'
++ kubectl get ValidatingWebhookConfiguration
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get ValidatingWebhookConfiguration
++ awk '{print $1}'
++ grep validate-auth
+ timeout 30 kubectl delete ValidatingWebhookConfiguration
error: resource(s) were provided, but no name was specified
+ :
++ kubectl api-resources
++ awk '{print $1}'
++ grep chaos-mesh
++ kubectl get crd
++ awk '{print $1}'
++ grep chaos-mesh.org
+ timeout 30 kubectl delete crd
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrolebinding
++ awk '{print $1}'
++ grep chaos-mesh
+ timeout 30 kubectl delete clusterrolebinding
error: resource(s) were provided, but no name was specified
+ :
++ kubectl get clusterrole
++ awk '{print $1}'
++ grep chaos-mesh
+ timeout 30 kubectl delete clusterrole
error: resource(s) were provided, but no name was specified
+ :
+ desc 'cleaned up all old namespaces'
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up all old namespaces
-----------------------------------------------------------------------------------
+ kubectl_bin get ns
+ '[' -n '' ']'
+ desc 'cleaned up old namespaces proxysql-scheduler-17053'
+ xargs kubectl delete ns
+ set +o xtrace
-----------------------------------------------------------------------------------
cleaned up old namespaces proxysql-scheduler-17053
-----------------------------------------------------------------------------------
+ kubectl_bin delete namespace proxysql-scheduler-17053
+ awk '{print$1}'
++ mktemp
++ mktemp
+ local LAST_OUT=/tmp/tmp.wu83hexFn4
+ local LAST_OUT=/tmp/tmp.H8nz57fbGu
++ mktemp
++ mktemp
+ local LAST_ERR=/tmp/tmp.jTSkjt3eMp
+ local exit_status=0
+ local LAST_ERR=/tmp/tmp.EhIrt1rYFn
+ local exit_status=0
++ seq 0 2
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace proxysql-scheduler-17053
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl get ns
+ grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME'
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.H8nz57fbGu
+ cat /tmp/tmp.EhIrt1rYFn
+ rm /tmp/tmp.H8nz57fbGu /tmp/tmp.EhIrt1rYFn
+ return 0
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace proxysql-scheduler-17053
error: resource(s) were provided, but no name was specified
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete namespace proxysql-scheduler-17053
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ cat /tmp/tmp.wu83hexFn4
+ cat /tmp/tmp.jTSkjt3eMp
Error from server (NotFound): namespaces "proxysql-scheduler-17053" not found
+ rm /tmp/tmp.wu83hexFn4 /tmp/tmp.jTSkjt3eMp
+ return 1
+ :
+ wait_for_delete namespace/proxysql-scheduler-17053
+ local res=namespace/proxysql-scheduler-17053
+ echo -n 'waiting for namespace/proxysql-scheduler-17053 to be deleted'
waiting for namespace/proxysql-scheduler-17053 to be deleted+ set +o xtrace
Error from server (NotFound): namespaces "proxysql-scheduler-17053" not found
+ desc 'create namespace proxysql-scheduler-17053'
+ set +o xtrace
-----------------------------------------------------------------------------------
create namespace proxysql-scheduler-17053
-----------------------------------------------------------------------------------
+ kubectl_bin create namespace proxysql-scheduler-17053
++ mktemp
+ local LAST_OUT=/tmp/tmp.u1hVGQCot5
++ mktemp
+ local LAST_ERR=/tmp/tmp.8Ur8UvV8uQ
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl create namespace proxysql-scheduler-17053
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.u1hVGQCot5
namespace/proxysql-scheduler-17053 created
+ cat /tmp/tmp.8Ur8UvV8uQ
+ rm /tmp/tmp.u1hVGQCot5 /tmp/tmp.8Ur8UvV8uQ
+ return 0
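create_namespace is delete-then-create: the delete can legitimately fail with NotFound on a fresh cluster (all three retries above fail, and the script moves on via the `+ :` no-op), wait_for_delete then polls until the namespace is gone, and the namespace is created. A reconstructed shape under those assumptions, not the verbatim function:

    create_namespace() {
        local namespace=$1
        kubectl delete namespace "$namespace" || :   # tolerated: NotFound on first run
        wait_for_delete "namespace/$namespace"       # polls until the API answers NotFound
        kubectl create namespace "$namespace"
    }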
++ kubectl_bin config current-context
+++ mktemp
++ local LAST_OUT=/tmp/tmp.JX6MdE9iYP
+++ mktemp
++ local LAST_ERR=/tmp/tmp.aKfIv9rjN0
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl config current-context
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.JX6MdE9iYP
++ cat /tmp/tmp.aKfIv9rjN0
++ rm /tmp/tmp.JX6MdE9iYP /tmp/tmp.aKfIv9rjN0
++ return 0
+ kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2290-96398578-2-cluster7 --namespace=proxysql-scheduler-17053
++ mktemp
+ local LAST_OUT=/tmp/tmp.WwfvUQKiQj
++ mktemp
+ local LAST_ERR=/tmp/tmp.J0lzxKZlfW
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2290-96398578-2-cluster7 --namespace=proxysql-scheduler-17053
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.WwfvUQKiQj
Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2290-96398578-2-cluster7" modified.
+ cat /tmp/tmp.J0lzxKZlfW
+ rm /tmp/tmp.WwfvUQKiQj /tmp/tmp.J0lzxKZlfW
+ return 0
+ apply_secrets
+ desc 'create secrets for cloud storages'
+ set +o xtrace
-----------------------------------------------------------------------------------
create secrets for cloud storages
-----------------------------------------------------------------------------------
+ '[' -z '' ']'
+ kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/conf/cloud-secret.yml
++ mktemp
+ local LAST_OUT=/tmp/tmp.84H4LbT45R
++ mktemp
+ local LAST_ERR=/tmp/tmp.WazydSbRVq
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/conf/cloud-secret.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.84H4LbT45R
secret/minio-secret created
secret/aws-s3-secret created
secret/gcp-cs-secret created
secret/azure-secret created
+ cat /tmp/tmp.WazydSbRVq
+ rm /tmp/tmp.84H4LbT45R /tmp/tmp.WazydSbRVq
+ return 0
+ desc 'create PXC cluster: some-name'
+ set +o xtrace
-----------------------------------------------------------------------------------
create PXC cluster: some-name
-----------------------------------------------------------------------------------
+ spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/conf/some-name.yml
+ local cluster=some-name
+ local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/conf/some-name.yml
+ local size=3
+ local sleep=10
+ local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/conf/secrets.yml
+ local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/conf/client.yml
+ local port=3306
+ desc 'create first PXC cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
create first PXC cluster
-----------------------------------------------------------------------------------
+ kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/conf/secrets.yml
++ mktemp
+ local LAST_OUT=/tmp/tmp.NwIiOnVhpb
++ mktemp
+ local LAST_ERR=/tmp/tmp.VZOzqVQcF3
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/conf/secrets.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.NwIiOnVhpb
secret/my-cluster-secrets created
secret/some-name-ssl created
secret/some-name-ssl-internal created
+ cat /tmp/tmp.VZOzqVQcF3
+ rm /tmp/tmp.NwIiOnVhpb /tmp/tmp.VZOzqVQcF3
+ return 0
+ apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/conf/client.yml
+ local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/conf/client.yml
+ local pvc_name=
+ '[' -z '' ']'
+ cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/conf/client.yml ''
+ local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/conf/client.yml
+ local pvc_name=
+ kubectl_bin apply -f -
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/conf/client.yml
+ /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#'
++ mktemp
+ /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2290-96398578#'
+ /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#'
+ local LAST_OUT=/tmp/tmp.nQ8IlByj6M
+ /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#'
+ /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#'
++ mktemp
+ /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#'
+ /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #'
+ /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#'
+ /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ /usr/bin/sed -e s~minio-service.#namespace~minio-service.proxysql-scheduler-17053~
+ local LAST_ERR=/tmp/tmp.qpGpWKFFVs
+ /usr/bin/sed -e 's#apply:.*#apply: Never#'
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.nQ8IlByj6M
deployment.apps/pxc-client created
+ cat /tmp/tmp.qpGpWKFFVs
+ rm /tmp/tmp.nQ8IlByj6M /tmp/tmp.qpGpWKFFVs
+ return 0
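cat_config pipes every manifest through the same sed battery before apply: apiVersion pinned to pxc.percona.com/v1, each image family (init, pxc, proxysql, haproxy, backup, logcollector, pmm) rewritten to the images under test, and the minio namespace placeholder substituted. An abridged sketch showing only a few of the substitutions traced above; the function shape is a reconstruction:

    cat_config() {
        local input_file=$1
        cat "$input_file" \
            | /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
            | /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2290-96398578#' \
            | /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' \
            | /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' \
            | /usr/bin/sed -e s~minio-service.#namespace~minio-service.proxysql-scheduler-17053~
    }
    # applied as: cat_config conf/client.yml | kubectl apply -f -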
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/conf/some-name.yml
+ local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/conf/some-name.yml
+ local pvc_name=
+ '[' -z '' ']'
+ cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/conf/some-name.yml ''
+ local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/conf/some-name.yml
+ local pvc_name=
+ /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#'
+ /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#'
+ /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#'
+ kubectl_bin apply -f -
+ /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #'
++ mktemp
+ local LAST_OUT=/tmp/tmp.Q4q27xNM9w
++ mktemp
+ /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#'
+ /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2290-96398578#'
+ /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#'
+ /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#'
+ /usr/bin/sed -e s~minio-service.#namespace~minio-service.proxysql-scheduler-17053~
+ local LAST_ERR=/tmp/tmp.faljlbRMQD
+ /usr/bin/sed -e 's#apply:.*#apply: Never#'
+ local exit_status=0
+ /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#'
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/conf/some-name.yml
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.Q4q27xNM9w
perconaxtradbcluster.pxc.percona.com/some-name created
+ cat /tmp/tmp.faljlbRMQD
+ rm /tmp/tmp.Q4q27xNM9w /tmp/tmp.faljlbRMQD
+ return 0
+ desc 'check if all 3 Pods started'
+ set +o xtrace
-----------------------------------------------------------------------------------
check if all 3 Pods started
-----------------------------------------------------------------------------------
++ get_proxy some-name
++ local target_cluster=some-name
+++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}'
++++ mktemp
+++ local LAST_OUT=/tmp/tmp.UFQjRbvBil
++++ mktemp
+++ local LAST_ERR=/tmp/tmp.sIqW3tEmtW
+++ local exit_status=0
++++ seq 0 2
+++ for i in '$(seq 0 2)'
+++ set +e
+++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}'
+++ exit_status=0
+++ set -e
+++ '[' 0 '!=' 0 ']'
+++ break
+++ cat /tmp/tmp.UFQjRbvBil
+++ cat /tmp/tmp.sIqW3tEmtW
+++ rm /tmp/tmp.UFQjRbvBil /tmp/tmp.sIqW3tEmtW
+++ return 0
++ [[ '' == \t\r\u\e ]]
+++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}'
++++ mktemp
+++ local LAST_OUT=/tmp/tmp.qpXk2RPeQj
++++ mktemp
+++ local LAST_ERR=/tmp/tmp.9hL6W1KhUR
+++ local exit_status=0
++++ seq 0 2
+++ for i in '$(seq 0 2)'
+++ set +e
+++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}'
+++ exit_status=0
+++ set -e
+++ '[' 0 '!=' 0 ']'
+++ break
+++ cat /tmp/tmp.qpXk2RPeQj
+++ cat /tmp/tmp.9hL6W1KhUR
+++ rm /tmp/tmp.qpXk2RPeQj /tmp/tmp.9hL6W1KhUR
+++ return 0
++ [[ true == \t\r\u\e ]]
++ echo some-name-proxysql
++ return
+ local proxy=some-name-proxysql
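get_proxy decides which endpoint the test talks to by reading the CR spec: haproxy.enabled is empty here, proxysql.enabled is true, so the proxy becomes some-name-proxysql. A sketch consistent with the trace; the final fallback branch is an assumption, since this run never reaches it:

    get_proxy() {
        local cluster=$1
        if [[ $(kubectl get pxc "$cluster" -o 'jsonpath={.spec.haproxy.enabled}') == "true" ]]; then
            echo "$cluster-haproxy"
            return
        fi
        if [[ $(kubectl get pxc "$cluster" -o 'jsonpath={.spec.proxysql.enabled}') == "true" ]]; then
            echo "$cluster-proxysql"
            return
        fi
        echo "$cluster-pxc"   # assumed fallback when neither proxy is enabled
    }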
+ kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n proxysql-scheduler-17053
++ mktemp
+ local LAST_OUT=/tmp/tmp.4toc7TFQm9
++ mktemp
+ local LAST_ERR=/tmp/tmp.dVqRlierlJ
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n proxysql-scheduler-17053
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n proxysql-scheduler-17053
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n proxysql-scheduler-17053
+ exit_status=1
+ set -e
+ '[' 1 '!=' 0 ']'
+ '[' 1 == 1 ']'
+ sleep 0
+ cat /tmp/tmp.4toc7TFQm9
+ cat /tmp/tmp.dVqRlierlJ
error: no matching resources found
+ rm /tmp/tmp.4toc7TFQm9 /tmp/tmp.dVqRlierlJ
+ return 1
+ true
+ wait_for_running some-name-proxysql 1
+ local name=some-name-proxysql
+ let last_pod=0
+ :
+ local max_retry=480
+ desc 'wait for running cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
wait for running cluster
-----------------------------------------------------------------------------------
++ seq 0 0
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-proxysql-0 480
+ local pod=some-name-proxysql-0
+ local max_retry=480
+ local ns=
++ echo some-name-proxysql-0
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=proxysql
+ set +o xtrace
pod/some-name-proxysql-0 condition met
waiting for pod/some-name-proxysql-0 to become Ready.Ok
+ wait_for_running some-name-pxc 3
+ local name=some-name-pxc
+ let last_pod=2
+ local max_retry=480
+ desc 'wait for running cluster'
+ set +o xtrace
-----------------------------------------------------------------------------------
wait for running cluster
-----------------------------------------------------------------------------------
++ seq 0 2
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-pxc-0 480
+ local pod=some-name-pxc-0
+ local max_retry=480
+ local ns=
++ echo some-name-pxc-0
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=pxc
+ set +o xtrace
pod/some-name-pxc-0 condition met
waiting for pod/some-name-pxc-0 to become Ready.Ok
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-pxc-1 480
+ local pod=some-name-pxc-1
+ local max_retry=480
+ local ns=
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ echo some-name-pxc-1
++ grep -E '^(pxc|proxysql)$'
+ local container=pxc
+ set +o xtrace
pod/some-name-pxc-1 condition met
waiting for pod/some-name-pxc-1 to become Ready.Ok
+ for i in '$(seq 0 $last_pod)'
+ wait_pod some-name-pxc-2 480
+ local pod=some-name-pxc-2
+ local max_retry=480
+ local ns=
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
++ echo some-name-pxc-2
+ local container=pxc
+ set +o xtrace
pod/some-name-pxc-2 condition met
waiting for pod/some-name-pxc-2 to become Ready.Ok
+ sleep 10
++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}'
+ local secret_name=my-cluster-secrets
++ getSecretData my-cluster-secrets root
++ local secretName=my-cluster-secrets
++ local dataKey=root
++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.E3v7LUqqgp
+++ mktemp
++ local LAST_ERR=/tmp/tmp.vF4mhR5PDq
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.E3v7LUqqgp
++ cat /tmp/tmp.vF4mhR5PDq
++ rm /tmp/tmp.E3v7LUqqgp /tmp/tmp.vF4mhR5PDq
++ return 0
+ local root_pass=root_password
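getSecretData is fully visible in the trace: it reads one data key from a Secret via a Go template and base64-decodes it. A minimal sketch of that helper as evidenced above:

    getSecretData() {
        local secretName=$1 dataKey=$2
        kubectl get "secrets/$secretName" "--template={{.data.$dataKey}}" | base64 --decode
    }
    root_pass=$(getSecretData my-cluster-secrets root)   # yields root_password in this run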
+ desc 'write data'
+ set +o xtrace
-----------------------------------------------------------------------------------
write data
-----------------------------------------------------------------------------------
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -p'\''root_password'\'' -P3306'
+ local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;'
+ local 'uri=-h some-name-proxysql -uroot -p'\''root_password'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.j48jUFtBf5
+++ mktemp
++ local LAST_ERR=/tmp/tmp.ArOewO7sOt
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.j48jUFtBf5
++ cat /tmp/tmp.ArOewO7sOt
++ rm /tmp/tmp.j48jUFtBf5 /tmp/tmp.ArOewO7sOt
++ return 0
+ client_pod=pxc-client-59944c5bbf-q2fpx
+ wait_pod pxc-client-59944c5bbf-q2fpx
+ local pod=pxc-client-59944c5bbf-q2fpx
+ local max_retry=480
+ local ns=
++ echo pxc-client-59944c5bbf-q2fpx
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-59944c5bbf-q2fpx condition met
waiting for pod/pxc-client-59944c5bbf-q2fpx to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -p'\''root_password'\'' -P3306'
+ local 'command=INSERT myApp.myApp (id) VALUES (100500)'
+ local 'uri=-h some-name-proxysql -uroot -p'\''root_password'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.PQv3pYAkS6
+++ mktemp
++ local LAST_ERR=/tmp/tmp.xnqL3R3ZFZ
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.PQv3pYAkS6
++ cat /tmp/tmp.xnqL3R3ZFZ
++ rm /tmp/tmp.PQv3pYAkS6 /tmp/tmp.xnqL3R3ZFZ
++ return 0
+ client_pod=pxc-client-59944c5bbf-q2fpx
+ wait_pod pxc-client-59944c5bbf-q2fpx
+ local pod=pxc-client-59944c5bbf-q2fpx
+ local max_retry=480
+ local ns=
++ echo pxc-client-59944c5bbf-q2fpx
++ grep -E '^(pxc|proxysql)$'
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
+ local container=
+ set +o xtrace
pod/pxc-client-59944c5bbf-q2fpx condition met
waiting for pod/pxc-client-59944c5bbf-q2fpx to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ sleep 30
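The actual SQL execution is hidden behind `set +o xtrace`, so only run_mysql's argument handling and client-pod lookup are visible. A sketch of what the suppressed step presumably does, under the assumption that it pipes the statement into the mysql client inside the pxc-client pod; the exec invocation and mysql flags here are guesses, not taken from the trace:

    run_mysql() {
        local command=$1 uri=$2 client_pod
        client_pod=$(kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}')
        wait_pod "$client_pod"
        # assumed shape of the suppressed step:
        kubectl exec "$client_pod" -- bash -c "printf '%s\n' \"$command\" | mysql -sN $uri"
    }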
++ seq 0 2
+ for i in '$(seq 0 $((size - 1)))'
+ compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'' -P3306'
+ local command_id=select-1
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'' -P3306'
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/select-1.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/select-1-80.sql ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'' -P3306'
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.E74LO2zATk
+++ mktemp
++ local LAST_ERR=/tmp/tmp.kD4S3YwNJW
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.E74LO2zATk
++ cat /tmp/tmp.kD4S3YwNJW
++ rm /tmp/tmp.E74LO2zATk /tmp/tmp.kD4S3YwNJW
++ return 0
+ client_pod=pxc-client-59944c5bbf-q2fpx
+ wait_pod pxc-client-59944c5bbf-q2fpx
+ local pod=pxc-client-59944c5bbf-q2fpx
+ local max_retry=480
+ local ns=
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ echo pxc-client-59944c5bbf-q2fpx
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-59944c5bbf-q2fpx condition met
waiting for pod/pxc-client-59944c5bbf-q2fpx to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ [[ ! -s /tmp/tmp.m3LeJ6naj1/select-1.sql ]]
++ grep 'Unknown MySQL server host' /tmp/tmp.m3LeJ6naj1/select-1.sql
+ [[ -n '' ]]
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/select-1.sql /tmp/tmp.m3LeJ6naj1/select-1.sql
+ for i in '$(seq 0 $((size - 1)))'
+ compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'' -P3306'
+ local command_id=select-1
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'' -P3306'
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/select-1.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/select-1-80.sql ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'' -P3306'
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.P5XULIaKtA
+++ mktemp
++ local LAST_ERR=/tmp/tmp.mqlWtg7NX7
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.P5XULIaKtA
++ cat /tmp/tmp.mqlWtg7NX7
++ rm /tmp/tmp.P5XULIaKtA /tmp/tmp.mqlWtg7NX7
++ return 0
+ client_pod=pxc-client-59944c5bbf-q2fpx
+ wait_pod pxc-client-59944c5bbf-q2fpx
+ local pod=pxc-client-59944c5bbf-q2fpx
+ local max_retry=480
+ local ns=
++ grep -E '^(pxc|proxysql)$'
++ echo pxc-client-59944c5bbf-q2fpx
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
+ local container=
+ set +o xtrace
pod/pxc-client-59944c5bbf-q2fpx condition met
waiting for pod/pxc-client-59944c5bbf-q2fpx to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ [[ ! -s /tmp/tmp.m3LeJ6naj1/select-1.sql ]]
++ grep 'Unknown MySQL server host' /tmp/tmp.m3LeJ6naj1/select-1.sql
+ [[ -n '' ]]
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/select-1.sql /tmp/tmp.m3LeJ6naj1/select-1.sql
+ for i in '$(seq 0 $((size - 1)))'
+ compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'' -P3306'
+ local command_id=select-1
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'' -P3306'
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/select-1.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/select-1-80.sql ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'' -P3306'
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'' -P3306'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.liTDLeSBtE
+++ mktemp
++ local LAST_ERR=/tmp/tmp.2GVbYj5OOu
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.liTDLeSBtE
++ cat /tmp/tmp.2GVbYj5OOu
++ rm /tmp/tmp.liTDLeSBtE /tmp/tmp.2GVbYj5OOu
++ return 0
+ client_pod=pxc-client-59944c5bbf-q2fpx
+ wait_pod pxc-client-59944c5bbf-q2fpx
+ local pod=pxc-client-59944c5bbf-q2fpx
+ local max_retry=480
+ local ns=
++ grep -E '^(pxc|proxysql)$'
++ echo pxc-client-59944c5bbf-q2fpx
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
+ local container=
+ set +o xtrace
pod/pxc-client-59944c5bbf-q2fpx condition met
waiting for pod/pxc-client-59944c5bbf-q2fpx to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ [[ ! -s /tmp/tmp.m3LeJ6naj1/select-1.sql ]]
++ grep 'Unknown MySQL server host' /tmp/tmp.m3LeJ6naj1/select-1.sql
+ [[ -n '' ]]
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/select-1.sql /tmp/tmp.m3LeJ6naj1/select-1.sql
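compare_mysql_cmd, run here once per PXC node, saves the query result and compares it byte-for-byte against a checked-in .sql file: non-empty result, no "Unknown MySQL server host" marker, then `diff -u`, which under `set -e` fails the test on any difference. A reconstructed shape; $test_dir and $TMP_DIR (here /tmp/tmp.m3LeJ6naj1) are assumed names, and version-specific variants like select-1-80.sql are checked first when present:

    compare_mysql_cmd() {
        local command_id=$1 command=$2 uri=$3
        local expected=$test_dir/compare/$command_id.sql
        local actual=$TMP_DIR/$command_id.sql
        run_mysql "$command" "$uri" >"$actual"
        if [[ ! -s $actual ]]; then return 1; fi                              # assumed: empty result fails
        if [[ -n $(grep 'Unknown MySQL server host' "$actual") ]]; then return 1; fi
        diff -u "$expected" "$actual"                                         # any diff aborts under set -e
    }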
+ is_keyring_plugin_in_use some-name
+ local cluster=some-name
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ grep -E -o 'early-plugin-load=keyring_\w+.so'
+ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf'
++ mktemp
+ local LAST_OUT=/tmp/tmp.m9ONKvvWVS
++ mktemp
+ local LAST_ERR=/tmp/tmp.7HL5N5RkJN
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf'
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.m9ONKvvWVS
+ cat /tmp/tmp.7HL5N5RkJN
Unable to use a TTY - input is not a terminal or the right kind of file
+ rm /tmp/tmp.m9ONKvvWVS /tmp/tmp.7HL5N5RkJN
+ return 0
+ return 1
+ wait_cluster_consistency some-name 3 2
+ local cluster_name=some-name
+ local cluster_size=3
+ local proxy_size=2
+ '[' -z 2 ']'
+ desc 'wait cluster consistency'
+ set +o xtrace
-----------------------------------------------------------------------------------
wait cluster consistency
-----------------------------------------------------------------------------------
+ local i=0
+ local max=300
+ sleep 7
+ echo -n 'waiting for pxc/some-name to be ready'
waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.6nfpuskN7E
+++ mktemp
++ local LAST_ERR=/tmp/tmp.8S9Eclabys
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pxc some-name -o 'jsonpath={.status.state}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.6nfpuskN7E
++ cat /tmp/tmp.8S9Eclabys
++ rm /tmp/tmp.6nfpuskN7E /tmp/tmp.8S9Eclabys
++ return 0
+ [[ ready == \r\e\a\d\y ]]
++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.MHlKnFGvxa
+++ mktemp
++ local LAST_ERR=/tmp/tmp.kUCxoBEHIB
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.MHlKnFGvxa
++ cat /tmp/tmp.kUCxoBEHIB
++ rm /tmp/tmp.MHlKnFGvxa /tmp/tmp.kUCxoBEHIB
++ return 0
+ [[ 3 == \3 ]]
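wait_cluster_consistency polls the CR status over the API: .status.state must be "ready" and .status.pxc.ready must equal the cluster size (both pass on the first try above). A reconstructed polling shape; the i/max/sleep values come from the trace, but the loop body itself is an assumption:

    i=0; max=300
    until [[ $(kubectl get pxc some-name -o 'jsonpath={.status.state}') == "ready" &&
             $(kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}') == "3" ]]; do
        [[ $((i++)) -lt $max ]] || exit 1   # give up after max polls
        sleep 7
    done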
+++ get_proxy_engine some-name
+++ local cluster_name=some-name
++++ get_proxy some-name
++++ local target_cluster=some-name
+++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}'
++++++ mktemp
+++++ local LAST_OUT=/tmp/tmp.gpxe6WFVHZ
++++++ mktemp
+++++ local LAST_ERR=/tmp/tmp.c6aE40JC1d
+++++ local exit_status=0
++++++ seq 0 2
+++++ for i in '$(seq 0 2)'
+++++ set +e
+++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}'
+++++ exit_status=0
+++++ set -e
+++++ '[' 0 '!=' 0 ']'
+++++ break
+++++ cat /tmp/tmp.gpxe6WFVHZ
+++++ cat /tmp/tmp.c6aE40JC1d
+++++ rm /tmp/tmp.gpxe6WFVHZ /tmp/tmp.c6aE40JC1d
+++++ return 0
++++ [[ '' == \t\r\u\e ]]
+++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}'
++++++ mktemp
+++++ local LAST_OUT=/tmp/tmp.dYD4cmTj8C
++++++ mktemp
+++++ local LAST_ERR=/tmp/tmp.3O6axVVWyn
+++++ local exit_status=0
++++++ seq 0 2
+++++ for i in '$(seq 0 2)'
+++++ set +e
+++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}'
+++++ exit_status=0
+++++ set -e
+++++ '[' 0 '!=' 0 ']'
+++++ break
+++++ cat /tmp/tmp.dYD4cmTj8C
+++++ cat /tmp/tmp.3O6axVVWyn
+++++ rm /tmp/tmp.dYD4cmTj8C /tmp/tmp.3O6axVVWyn
+++++ return 0
++++ [[ true == \t\r\u\e ]]
++++ echo some-name-proxysql
++++ return
+++ local cluster_proxy=some-name-proxysql
+++ echo proxysql
++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.P6PJX9pGSh
+++ mktemp
++ local LAST_ERR=/tmp/tmp.djI6ktDSDU
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.P6PJX9pGSh
++ cat /tmp/tmp.djI6ktDSDU
++ rm /tmp/tmp.P6PJX9pGSh /tmp/tmp.djI6ktDSDU
++ return 0
+ [[ 2 == \2 ]]
+ echo
+ desc 'check if service and statefulset created with expected config'
+ set +o xtrace
-----------------------------------------------------------------------------------
check if service and statefulset created with expected config
-----------------------------------------------------------------------------------
+ compare_kubectl statefulset/some-name-pxc
+ local resource=statefulset/some-name-pxc
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc.yml
+ local new_result=/tmp/tmp.m3LeJ6naj1/statefulset_some-name-pxc.yml
+ desc 'compare statefulset/some-name-pxc-'
+ set +o xtrace
-----------------------------------------------------------------------------------
compare statefulset/some-name-pxc-
-----------------------------------------------------------------------------------
+ '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-eks.yml ']'
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-80.yml ']'
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ version_gt 1.33
++ bc -l
++ echo '1.31 >= 1.33'
+ '[' 0 -eq 1 ']'
+ return 1
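version_gt is fully evidenced by the trace: it feeds a comparison string to bc and tests whether the result is 1. A sketch; KUBE_VERSION (1.31 in this run) is an assumed variable name for the server's Kubernetes minor version:

    version_gt() {
        # true when the cluster's Kubernetes version is >= the given threshold
        local threshold=$1
        [[ $(echo "$KUBE_VERSION >= $threshold" | bc -l) -eq 1 ]]
    }
    # version_gt 1.33 -> false here (1.31 >= 1.33 evaluates to 0), so the -k133 compare variant is skipped;
    # version_gt 1.29 -> true, so statefulset_some-name-pxc-k129.yml would be used if that file existed.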
+ version_gt 1.29
++ bc -l
++ echo '1.31 >= 1.29'
+ '[' 1 -eq 1 ']'
+ return 0
+ '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-k129.yml ']'
+ version_gt 1.27
++ bc -l
++ echo '1.31 >= 1.27'
+ '[' 1 -eq 1 ']'
+ return 0
+ '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-k127.yml ']'
+ version_gt 1.24
++ echo '1.31 >= 1.24'
++ bc -l
+ '[' 1 -eq 1 ']'
+ return 0
+ '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-k124.yml ']'
+ version_gt 1.22
++ bc -l
++ echo '1.31 >= 1.22'
+ '[' 1 -eq 1 ']'
+ return 0
+ '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-k122.yml ']'
+ version_gt 1.21
++ echo '1.31 >= 1.21'
++ bc -l
+ '[' 1 -eq 1 ']'
+ return 0
+ '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-k121.yml ']'
+ '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-oc.yml ']'
+ version_gt 1.29
++ bc -l
++ echo '1.31 >= 1.29'
+ '[' 1 -eq 1 ']'
+ return 0
+ '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-k129-oc.yml ']'
+ '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-eks.yml ']'
+ '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-aks.yml ']'
+ kubectl_bin get -o yaml statefulset/some-name-pxc
++ mktemp
+ yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("proxysql-scheduler-17053", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' -
+ local LAST_OUT=/tmp/tmp.ksRZeMauGa
++ mktemp
+ local LAST_ERR=/tmp/tmp.cEcfcnWs54
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl get -o yaml statefulset/some-name-pxc
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.ksRZeMauGa
+ cat /tmp/tmp.cEcfcnWs54
+ rm /tmp/tmp.ksRZeMauGa /tmp/tmp.cEcfcnWs54
+ return 0
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc.yml /tmp/tmp.m3LeJ6naj1/statefulset_some-name-pxc.yml
+ log 'compare_kubectl: statefulset/some-name-pxc OK'
++ date +%Y-%m-%dT%H:%M:%S%z
+ echo '[2025-12-05T10:08:26+0000]' compare_kubectl: statefulset/some-name-pxc OK
[2025-12-05T10:08:26+0000] compare_kubectl: statefulset/some-name-pxc OK
+ compare_kubectl statefulset/some-name-proxysql
+ local resource=statefulset/some-name-proxysql
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql.yml
+ local new_result=/tmp/tmp.m3LeJ6naj1/statefulset_some-name-proxysql.yml
+ desc 'compare statefulset/some-name-proxysql-'
+ set +o xtrace
-----------------------------------------------------------------------------------
compare statefulset/some-name-proxysql-
-----------------------------------------------------------------------------------
+ '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-eks.yml ']'
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-80.yml ']'
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ version_gt 1.33
++ bc -l
++ echo '1.31 >= 1.33'
+ '[' 0 -eq 1 ']'
+ return 1
+ version_gt 1.29
++ bc -l
++ echo '1.31 >= 1.29'
+ '[' 1 -eq 1 ']'
+ return 0
+ '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-k129.yml ']'
+ version_gt 1.27
++ echo '1.31 >= 1.27'
++ bc -l
+ '[' 1 -eq 1 ']'
+ return 0
+ '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-k127.yml ']'
+ version_gt 1.24
++ bc -l
++ echo '1.31 >= 1.24'
+ '[' 1 -eq 1 ']'
+ return 0
+ '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-k124.yml ']'
+ version_gt 1.22
++ echo '1.31 >= 1.22'
++ bc -l
+ '[' 1 -eq 1 ']'
+ return 0
+ '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-k122.yml ']'
+ version_gt 1.21
++ echo '1.31 >= 1.21'
++ bc -l
+ '[' 1 -eq 1 ']'
+ return 0
+ '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-k121.yml ']'
+ '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-oc.yml ']'
+ version_gt 1.29
++ echo '1.31 >= 1.29'
++ bc -l
+ '[' 1 -eq 1 ']'
+ return 0
+ '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-k129-oc.yml ']'
+ '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-eks.yml ']'
+ '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-aks.yml ']'
+ kubectl_bin get -o yaml statefulset/some-name-proxysql
++ mktemp
+ local LAST_OUT=/tmp/tmp.YKLtQ0viHS
+ yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("proxysql-scheduler-17053", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' -
++ mktemp
+ local LAST_ERR=/tmp/tmp.H8TYHEcM92
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl get -o yaml statefulset/some-name-proxysql
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.YKLtQ0viHS
+ cat /tmp/tmp.H8TYHEcM92
+ rm /tmp/tmp.YKLtQ0viHS /tmp/tmp.H8TYHEcM92
+ return 0
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql.yml /tmp/tmp.m3LeJ6naj1/statefulset_some-name-proxysql.yml
+ log 'compare_kubectl: statefulset/some-name-proxysql OK'
++ date +%Y-%m-%dT%H:%M:%S%z
+ echo '[2025-12-05T10:08:28+0000]' compare_kubectl: statefulset/some-name-proxysql OK
[2025-12-05T10:08:28+0000] compare_kubectl: statefulset/some-name-proxysql OK
+ compare_kubectl service/some-name-pxc
+ local resource=service/some-name-pxc
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc.yml
+ local new_result=/tmp/tmp.m3LeJ6naj1/service_some-name-pxc.yml
+ desc 'compare service/some-name-pxc-'
+ set +o xtrace
-----------------------------------------------------------------------------------
compare service/some-name-pxc-
-----------------------------------------------------------------------------------
+ '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-eks.yml ']'
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-80.yml ']'
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ version_gt 1.33
++ echo '1.31 >= 1.33'
++ bc -l
+ '[' 0 -eq 1 ']'
+ return 1
+ version_gt 1.29
++ bc -l
++ echo '1.31 >= 1.29'
+ '[' 1 -eq 1 ']'
+ return 0
+ '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-k129.yml ']'
+ version_gt 1.27
++ echo '1.31 >= 1.27'
++ bc -l
+ '[' 1 -eq 1 ']'
+ return 0
+ '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-k127.yml ']'
+ version_gt 1.24
++ echo '1.31 >= 1.24'
++ bc -l
+ '[' 1 -eq 1 ']'
+ return 0
+ '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-k124.yml ']'
+ version_gt 1.22
++ bc -l
++ echo '1.31 >= 1.22'
+ '[' 1 -eq 1 ']'
+ return 0
+ '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-k122.yml ']'
+ version_gt 1.21
++ bc -l
++ echo '1.31 >= 1.21'
+ '[' 1 -eq 1 ']'
+ return 0
+ '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-k121.yml ']'
+ '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-oc.yml ']'
+ version_gt 1.29
++ bc -l
++ echo '1.31 >= 1.29'
+ '[' 1 -eq 1 ']'
+ return 0
+ '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-k129-oc.yml ']'
+ '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-eks.yml ']'
+ '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-aks.yml ']'
+ yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(..
| select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("proxysql-scheduler-17053", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. 
| select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + kubectl_bin get -o yaml service/some-name-pxc ++ mktemp + local LAST_OUT=/tmp/tmp.1YqthKrLzE ++ mktemp + local LAST_ERR=/tmp/tmp.KiIeWIQWvK + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml service/some-name-pxc + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1YqthKrLzE + cat /tmp/tmp.KiIeWIQWvK + rm /tmp/tmp.1YqthKrLzE /tmp/tmp.KiIeWIQWvK + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc.yml /tmp/tmp.m3LeJ6naj1/service_some-name-pxc.yml + log 'compare_kubectl: service/some-name-pxc OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-05T10:08:29+0000]' compare_kubectl: service/some-name-pxc OK [2025-12-05T10:08:29+0000] compare_kubectl: service/some-name-pxc OK + compare_kubectl service/some-name-proxysql + local resource=service/some-name-proxysql + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql.yml + local new_result=/tmp/tmp.m3LeJ6naj1/service_some-name-proxysql.yml + desc 'compare service/some-name-proxysql-' + set +o xtrace ----------------------------------------------------------------------------------- compare service/some-name-proxysql- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 ++ echo '1.31 >= 1.33' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-k129.yml ']' + version_gt 1.27 ++ echo '1.31 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-k127.yml ']' + version_gt 1.24 ++ echo '1.31 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-k124.yml ']' + version_gt 1.22 ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-k122.yml ']' + version_gt 1.21 ++ echo '1.31 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-oc.yml ']' + version_gt 1.29 ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' 
-z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-aks.yml ']' + kubectl_bin get -o yaml service/some-name-proxysql ++ mktemp + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. 
| select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("proxysql-scheduler-17053", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + local LAST_OUT=/tmp/tmp.yehIqesvuN ++ mktemp + local LAST_ERR=/tmp/tmp.41axNQeQni + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml service/some-name-proxysql + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.yehIqesvuN + cat /tmp/tmp.41axNQeQni + rm /tmp/tmp.yehIqesvuN /tmp/tmp.41axNQeQni + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql.yml /tmp/tmp.m3LeJ6naj1/service_some-name-proxysql.yml + log 'compare_kubectl: service/some-name-proxysql OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-05T10:08:30+0000]' compare_kubectl: service/some-name-proxysql OK [2025-12-05T10:08:30+0000] compare_kubectl: service/some-name-proxysql OK + compare_kubectl service/some-name-proxysql-unready + local resource=service/some-name-proxysql-unready + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready.yml + local new_result=/tmp/tmp.m3LeJ6naj1/service_some-name-proxysql-unready.yml + desc 'compare service/some-name-proxysql-unready-' + set +o xtrace ----------------------------------------------------------------------------------- compare service/some-name-proxysql-unready- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 ++ echo '1.31 >= 1.33' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-k129.yml ']' + version_gt 1.27 ++ echo '1.31 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-k127.yml ']' + version_gt 1.24 ++ echo '1.31 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-k124.yml ']' + version_gt 1.22 ++ echo '1.31 >= 1.22' ++ bc 
-l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-k122.yml ']' + version_gt 1.21 ++ echo '1.31 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-oc.yml ']' + version_gt 1.29 ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-aks.yml ']' + kubectl_bin get -o yaml service/some-name-proxysql-unready ++ mktemp + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. 
| select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("proxysql-scheduler-17053", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + local LAST_OUT=/tmp/tmp.rrljkgeL53 ++ mktemp + local LAST_ERR=/tmp/tmp.ZIkVGppBsi + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml service/some-name-proxysql-unready + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.rrljkgeL53 + cat /tmp/tmp.ZIkVGppBsi + rm /tmp/tmp.rrljkgeL53 /tmp/tmp.ZIkVGppBsi + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready.yml /tmp/tmp.m3LeJ6naj1/service_some-name-proxysql-unready.yml + log 'compare_kubectl: service/some-name-proxysql-unready OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-05T10:08:31+0000]' compare_kubectl: service/some-name-proxysql-unready OK [2025-12-05T10:08:31+0000] compare_kubectl: service/some-name-proxysql-unready OK + sleep 120 + desc 'check if scheduler is enabled in all ProxySQL servers' + set +o xtrace ----------------------------------------------------------------------------------- check if scheduler is enabled in all ProxySQL servers ----------------------------------------------------------------------------------- + compare_scheduler some-name-proxysql-0 scheduler-0 + local pod=some-name-proxysql-0 + local compare_file=scheduler-0 + compare_mysql_cmd_local scheduler-0 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + local command_id=scheduler-0 + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/scheduler-0.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/scheduler-0-80.sql ']' + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 '' + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local container_name= + set +o xtrace + '[' '!' -s /tmp/tmp.m3LeJ6naj1/scheduler-0.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/scheduler-0.sql /tmp/tmp.m3LeJ6naj1/scheduler-0.sql + log 'scheduler is enabled in some-name-proxysql-0: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-05T10:10:33+0000]' scheduler is enabled in some-name-proxysql-0: OK [2025-12-05T10:10:33+0000] scheduler is enabled in some-name-proxysql-0: OK + compare_scheduler some-name-proxysql-1 scheduler-1 + local pod=some-name-proxysql-1 + local compare_file=scheduler-1 + compare_mysql_cmd_local scheduler-1 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + local command_id=scheduler-1 + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/scheduler-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/scheduler-1-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 '' + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local container_name= + set +o xtrace + '[' '!' 
-s /tmp/tmp.m3LeJ6naj1/scheduler-1.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/scheduler-1.sql /tmp/tmp.m3LeJ6naj1/scheduler-1.sql + log 'scheduler is enabled in some-name-proxysql-1: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-05T10:10:36+0000]' scheduler is enabled in some-name-proxysql-1: OK [2025-12-05T10:10:36+0000] scheduler is enabled in some-name-proxysql-1: OK + desc 'check if scheduler is doing its job in all ProxySQL servers' + set +o xtrace ----------------------------------------------------------------------------------- check if scheduler is doing its job in all ProxySQL servers ----------------------------------------------------------------------------------- + compare_mysql_servers some-name-proxysql-0 mysql-servers-0 + local pod=some-name-proxysql-0 + local compare_file=mysql-servers-0 + compare_mysql_cmd_local mysql-servers-0 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + local command_id=mysql-servers-0 + local 'command=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/mysql-servers-0.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/mysql-servers-0-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 '' + local 'command=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local container_name= + set +o xtrace + '[' '!' 
-s /tmp/tmp.m3LeJ6naj1/mysql-servers-0.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/mysql-servers-0.sql /tmp/tmp.m3LeJ6naj1/mysql-servers-0.sql + log 'mysql_servers are configured in some-name-proxysql-0: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-05T10:10:38+0000]' mysql_servers are configured in some-name-proxysql-0: OK [2025-12-05T10:10:38+0000] mysql_servers are configured in some-name-proxysql-0: OK + compare_mysql_servers some-name-proxysql-1 mysql-servers-1 + local pod=some-name-proxysql-1 + local compare_file=mysql-servers-1 + compare_mysql_cmd_local mysql-servers-1 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + local command_id=mysql-servers-1 + local 'command=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/mysql-servers-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/mysql-servers-1-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 '' + local 'command=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local container_name= + set +o xtrace + '[' '!' 
-s /tmp/tmp.m3LeJ6naj1/mysql-servers-1.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/mysql-servers-1.sql /tmp/tmp.m3LeJ6naj1/mysql-servers-1.sql + log 'mysql_servers are configured in some-name-proxysql-1: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-05T10:10:41+0000]' mysql_servers are configured in some-name-proxysql-1: OK [2025-12-05T10:10:41+0000] mysql_servers are configured in some-name-proxysql-1: OK + desc 'check scaling PXC up' + set +o xtrace ----------------------------------------------------------------------------------- check scaling PXC up ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge -p '{"spec": {"pxc": {"size": 5}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.tSUZn9ql3W ++ mktemp + local LAST_ERR=/tmp/tmp.PG6JHq4O8e + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge -p '{"spec": {"pxc": {"size": 5}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.tSUZn9ql3W perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.PG6JHq4O8e + rm /tmp/tmp.tSUZn9ql3W /tmp/tmp.PG6JHq4O8e + return 0 + wait_pod some-name-pxc-3 + local pod=some-name-pxc-3 + local max_retry=480 + local ns= ++ echo some-name-pxc-3 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-3 condition met waiting for pod/some-name-pxc-3 to become Ready.Ok + wait_pod some-name-pxc-4 + local pod=some-name-pxc-4 + local max_retry=480 + local ns= ++ echo some-name-pxc-4 ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/some-name-pxc-4 condition met waiting for pod/some-name-pxc-4 to become Ready.Ok + sleep 120 + compare_mysql_servers some-name-proxysql-0 mysql-servers-0-1 + local pod=some-name-proxysql-0 + local compare_file=mysql-servers-0-1 + compare_mysql_cmd_local mysql-servers-0-1 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + local command_id=mysql-servers-0-1 + local 'command=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/mysql-servers-0-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/mysql-servers-0-1-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 '' + local 'command=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local container_name= + set +o xtrace + '[' '!' 
-s /tmp/tmp.m3LeJ6naj1/mysql-servers-0-1.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/mysql-servers-0-1.sql /tmp/tmp.m3LeJ6naj1/mysql-servers-0-1.sql + log 'new mysql_servers are configured in some-name-proxysql-0: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-05T10:15:16+0000]' new mysql_servers are configured in some-name-proxysql-0: OK [2025-12-05T10:15:16+0000] new mysql_servers are configured in some-name-proxysql-0: OK + compare_mysql_servers some-name-proxysql-1 mysql-servers-1-1 + local pod=some-name-proxysql-1 + local compare_file=mysql-servers-1-1 + compare_mysql_cmd_local mysql-servers-1-1 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + local command_id=mysql-servers-1-1 + local 'command=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/mysql-servers-1-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/mysql-servers-1-1-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 '' + local 'command=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local container_name= + set +o xtrace + '[' '!' 
-s /tmp/tmp.m3LeJ6naj1/mysql-servers-1-1.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/mysql-servers-1-1.sql /tmp/tmp.m3LeJ6naj1/mysql-servers-1-1.sql + log 'new mysql_servers are configured in some-name-proxysql-1: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-05T10:15:18+0000]' new mysql_servers are configured in some-name-proxysql-1: OK [2025-12-05T10:15:18+0000] new mysql_servers are configured in some-name-proxysql-1: OK + desc 'check scaling ProxySQL up' + set +o xtrace ----------------------------------------------------------------------------------- check scaling ProxySQL up ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge -p '{"spec": {"proxysql": {"size": 3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.ToplWvGYuP ++ mktemp + local LAST_ERR=/tmp/tmp.Zb807oq0re + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge -p '{"spec": {"proxysql": {"size": 3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ToplWvGYuP perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.Zb807oq0re + rm /tmp/tmp.ToplWvGYuP /tmp/tmp.Zb807oq0re + return 0 + wait_pod some-name-proxysql-2 + local pod=some-name-proxysql-2 + local max_retry=480 + local ns= ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo some-name-proxysql-2 + local container=proxysql + set +o xtrace pod/some-name-proxysql-2 condition met waiting for pod/some-name-proxysql-2 to become Ready.Ok + sleep 120 + compare_scheduler some-name-proxysql-2 scheduler-2 + local pod=some-name-proxysql-2 + local compare_file=scheduler-2 + compare_mysql_cmd_local scheduler-2 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-2 + local command_id=scheduler-2 + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-2 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/scheduler-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/scheduler-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-2 '' + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-2 + local container_name= + set +o xtrace + '[' '!' 
-s /tmp/tmp.m3LeJ6naj1/scheduler-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/scheduler-2.sql /tmp/tmp.m3LeJ6naj1/scheduler-2.sql + log 'scheduler is enabled in some-name-proxysql-2: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-05T10:17:44+0000]' scheduler is enabled in some-name-proxysql-2: OK [2025-12-05T10:17:44+0000] scheduler is enabled in some-name-proxysql-2: OK + compare_mysql_servers some-name-proxysql-2 mysql-servers-2 + local pod=some-name-proxysql-2 + local compare_file=mysql-servers-2 + compare_mysql_cmd_local mysql-servers-2 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-2 + local command_id=mysql-servers-2 + local 'command=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-2 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/mysql-servers-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/mysql-servers-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-2 '' + local 'command=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-2 + local container_name= + set +o xtrace + '[' '!' 
-s /tmp/tmp.m3LeJ6naj1/mysql-servers-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2290/e2e-tests/proxysql-scheduler/compare/mysql-servers-2.sql /tmp/tmp.m3LeJ6naj1/mysql-servers-2.sql + log 'mysql_servers are configured in some-name-proxysql-2: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-12-05T10:17:46+0000]' mysql_servers are configured in some-name-proxysql-2: OK [2025-12-05T10:17:46+0000] mysql_servers are configured in some-name-proxysql-2: OK + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + destroy proxysql-scheduler-17053 + local namespace=proxysql-scheduler-17053 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + grep -v 'get backup status: Job.batch' ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ + grep -v level=info +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator + sort -u + tee /tmp/tmp.m3LeJ6naj1/operator.log + grep -v 'the object has been modified' ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.6MWagAsLoK +++ mktemp ++ local LAST_ERR=/tmp/tmp.eDM36slutJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6MWagAsLoK ++ cat /tmp/tmp.eDM36slutJ ++ rm /tmp/tmp.6MWagAsLoK /tmp/tmp.eDM36slutJ ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-7b6f7d984c-ds97d ++ mktemp + local LAST_OUT=/tmp/tmp.LskjAQd10w ++ mktemp + local LAST_ERR=/tmp/tmp.2CYZUkYz5V + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-7b6f7d984c-ds97d + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LskjAQd10w + cat /tmp/tmp.2CYZUkYz5V + rm /tmp/tmp.LskjAQd10w /tmp/tmp.2CYZUkYz5V + return 0
2025-12-05T10:02:09.133Z INFO setup Manager starting up {"gitCommit": "9639857870f62bc8fb82fd2b378da1c03deacd15", "gitBranch": "PR-2290-96398578", "buildTime": "2025-12-05T09:17:40Z", "goVersion": "go1.25.5", "os": "linux", "arch": "amd64"} 2025-12-05T10:02:09.133Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.31.14-gke.1033000"} 2025-12-05T10:02:09.137Z INFO setup Registering Components.
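-----------------------------------------------------------------------------------
note: the golden-file pattern behind every compare_kubectl above
-----------------------------------------------------------------------------------
Each compare_kubectl step follows one pattern: dump the live object as YAML, strip every field the API server or operator populates at runtime (uids, timestamps, resourceVersion, image tags, config hashes, the test namespace), then diff the result against a checked-in golden file. A minimal sketch of that pattern, assuming yq v4 on the PATH; compare_resource and its arguments are illustrative stand-ins, not the suite's own helper:

#!/bin/bash
set -o errexit

# Usage: compare_resource statefulset/some-name-pxc compare/statefulset_some-name-pxc.yml
compare_resource() {
    local resource=$1 expected=$2 actual
    actual=$(mktemp)
    # Drop server-populated fields so the diff only shows what the operator owns.
    kubectl get -o yaml "$resource" | yq eval '
        del(.metadata.managedFields) |
        del(.metadata.resourceVersion) |
        del(.. | select(has("uid")).uid) |
        del(.. | select(has("creationTimestamp")).creationTimestamp) |
        del(.status)' - >"$actual"
    diff -u "$expected" "$actual"
}

diff -u exits non-zero on any mismatch, so under errexit a single drifted field fails the run; the "compare_kubectl: ... OK" lines above are exactly this diff coming back empty.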
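-----------------------------------------------------------------------------------
note: how version_gt picks a per-version golden file
-----------------------------------------------------------------------------------
The repeated version_gt calls walk down from the newest supported minor (-k129, -k127, -k124, ...) and take the first golden-file variant whose threshold the server satisfies. bash cannot compare floats natively, so the suite pipes the comparison through bc. The trick in isolation, with 1.31 hard-coded here because that is the server version seen in this run:

# Succeeds (returns 0) when the server version is >= the given threshold.
version_gt() {
    [ "$(echo "1.31 >= $1" | bc -l)" -eq 1 ]
}

version_gt 1.29 && echo "prefer the -k129 golden file if it exists"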
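-----------------------------------------------------------------------------------
note: what compare_scheduler and compare_mysql_servers actually query
-----------------------------------------------------------------------------------
Both checks go through run_mysql_local: a mysql client is run inside each proxysql pod against ProxySQL's admin interface, which speaks the MySQL protocol on 127.0.0.1:6032. The equivalent one-off commands, assuming the mysql client is present in the pod's default container (host, port, and the proxyadmin credentials below are the test values from this run):

# Is the scheduler loaded at runtime in this ProxySQL instance?
kubectl exec some-name-proxysql-0 -- \
    mysql -h127.0.0.1 -P6032 -uproxyadmin -padmin_password \
    -e 'SELECT * FROM runtime_scheduler;'

# Which backends has the scheduler promoted to ONLINE?
kubectl exec some-name-proxysql-0 -- \
    mysql -h127.0.0.1 -P6032 -uproxyadmin -padmin_password \
    -e "SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='ONLINE'"

Querying the runtime_* tables (rather than mysql_servers) matters: they show the configuration ProxySQL is actually running, i.e. what the scheduler script has loaded, not merely what was staged.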
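-----------------------------------------------------------------------------------
note: the scale-up checks are patch-and-wait, not statefulset edits
-----------------------------------------------------------------------------------
Both scaling steps change the size through the custom resource with a merge patch, never by editing the statefulset directly, so the operator stays the single writer of replica counts. The suite's wait_pod then polls the new ordinal with a 480-retry budget; kubectl wait is a reasonable stand-in for that loop:

# Scale PXC from 3 to 5 members via the CR, exactly as traced above.
kubectl patch pxc some-name --type=merge -p '{"spec": {"pxc": {"size": 5}}}'

# Block until the new ordinals pass readiness (stand-in for the suite's wait_pod).
kubectl wait --for=condition=Ready pod/some-name-pxc-3 pod/some-name-pxc-4 --timeout=480s

The sleep 120 that follows each wait gives the ProxySQL scheduler time to run and rewrite runtime_mysql_servers before the next comparison.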
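-----------------------------------------------------------------------------------
note: the kubectl_bin wrapper that dominates this trace
-----------------------------------------------------------------------------------
Every kubectl_bin call expands to the same scaffolding: up to three attempts (seq 0 2), stdout and stderr captured to mktemp files, errexit suspended only around the kubectl invocation itself, and both streams replayed once the loop exits. A standalone approximation of that wrapper (function and variable names here are illustrative):

# Retry kubectl up to three times, replaying captured output afterwards.
kubectl_retry() {
    local out err rc i
    out=$(mktemp)
    err=$(mktemp)
    for i in 0 1 2; do
        set +e
        kubectl "$@" >"$out" 2>"$err"
        rc=$?
        set -e
        [ "$rc" -eq 0 ] && break
    done
    cat "$out"
    cat "$err" >&2
    rm "$out" "$err"
    return "$rc"
}

Capturing both streams before replaying them is what keeps the log readable: a retried failure is printed once, in order, instead of interleaving with the trace of the retry loop itself.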
2025-12-05T10:02:09.722Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"} 2025-12-05T10:02:09.722Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"} 2025-12-05T10:02:09.722Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2025-12-05T10:02:09.722Z INFO controller-runtime.metrics Starting metrics server 2025-12-05T10:02:09.722Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2025-12-05T10:02:09.722Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2025-12-05T10:02:09.722Z INFO controller-runtime.webhook Starting webhook server 2025-12-05T10:02:09.722Z INFO setup Starting the Cmd. 2025-12-05T10:02:09.722Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2025-12-05T10:02:09.822Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 2025-12-05T10:02:09.850Z DEBUG events percona-xtradb-cluster-operator-7b6f7d984c-ds97d_509b761b-7727-4f17-b0e1-10efb9bf4aaf became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"b1679d40-9431-416b-9dee-6dae50f28f22","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1764928929843103009"}, "reason": "LeaderElection"} 2025-12-05T10:02:09.850Z INFO Starting EventSource {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2025-12-05T10:02:09.850Z INFO Starting EventSource {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "source": "kind source: *v1.PerconaXtraDBCluster"} 2025-12-05T10:02:09.850Z INFO Starting EventSource {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "source": "kind source: *v1.Secret"} 2025-12-05T10:02:09.850Z INFO Starting EventSource {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2025-12-05T10:02:09.850Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2025-12-05T10:02:09.950Z INFO Starting Controller {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup"} 2025-12-05T10:02:09.950Z INFO Starting Controller {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster"} 2025-12-05T10:02:09.950Z INFO Starting Controller {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore"} 2025-12-05T10:02:09.950Z INFO Starting workers {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup", "worker count": 1} 2025-12-05T10:02:09.950Z INFO Starting workers {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "worker count": 1} 2025-12-05T10:02:09.950Z INFO Starting workers {"controller": "pxcrestore-controller", 
"controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore", "worker count": 1} 2025-12-05T10:02:59.293Z INFO Set CR version {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "debbd175-9447-4884-ae22-66a91ae076a3", "version": "1.19.0"} 2025-12-05T10:02:59.630Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "debbd175-9447-4884-ae22-66a91ae076a3", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-12-05T10:02:59.749Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "debbd175-9447-4884-ae22-66a91ae076a3", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-12-05T10:02:59.793Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "debbd175-9447-4884-ae22-66a91ae076a3", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-12-05T10:02:59.860Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "debbd175-9447-4884-ae22-66a91ae076a3", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-05T10:02:59.896Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "debbd175-9447-4884-ae22-66a91ae076a3", "object": "some-name-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-05T10:02:59.924Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "debbd175-9447-4884-ae22-66a91ae076a3", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-05T10:02:59.998Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "debbd175-9447-4884-ae22-66a91ae076a3", "object": 
"some-name-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-12-05T10:03:00.883Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "a62ea884-b930-4718-b419-3b7396bd4758", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-12-05T10:03:00.900Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "a62ea884-b930-4718-b419-3b7396bd4758", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-12-05T10:04:16.999Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "d953868b-f356-4455-8041-61f7833be631", "user": "operator"} 2025-12-05T10:04:17.028Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "d953868b-f356-4455-8041-61f7833be631", "user": "monitor"} 2025-12-05T10:04:17.074Z INFO User monitor: granted privileges {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "d953868b-f356-4455-8041-61f7833be631"} 2025-12-05T10:04:17.114Z INFO monitor user privileges granted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "d953868b-f356-4455-8041-61f7833be631"} 2025-12-05T10:04:17.159Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "d953868b-f356-4455-8041-61f7833be631", "user": "xtrabackup"} 2025-12-05T10:04:17.213Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "d953868b-f356-4455-8041-61f7833be631"} 2025-12-05T10:04:17.240Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "d953868b-f356-4455-8041-61f7833be631", "user": "replication"} 2025-12-05T10:04:17.248Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "d953868b-f356-4455-8041-61f7833be631", "err": "get primary pxc pod: not found"} 2025-12-05T10:04:21.953Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "54af4663-9365-4207-aa5d-f633b83bde3e", "err": "get primary pxc pod: not found"} 2025-12-05T10:04:27.060Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "400e6092-f6e0-4b7e-9665-abc8bcb1c44b", "err": "get primary pxc pod: not found"} 2025-12-05T10:06:42.024Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "67ff62ab-5d88-4d08-90e0-272c4defe1f5", "user": "root"} 2025-12-05T10:06:42.140Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "67ff62ab-5d88-4d08-90e0-272c4defe1f5", "new version": "8.0.43-34.1"} 2025-12-05T10:06:44.642Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "67ff62ab-5d88-4d08-90e0-272c4defe1f5"} 2025-12-05T10:06:49.516Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "712ee18d-57ab-4802-b8ec-dd09fe9fc043"} 2025-12-05T10:06:55.159Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "1b2c9241-9fcd-463a-b9d5-fd197802ad18"} 2025-12-05T10:07:00.016Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": 
"PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "f7aa0f94-7735-486d-b54a-e307b8b4da9a"} 2025-12-05T10:07:05.540Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "16502a82-dd2b-4f0c-a98c-dce78ab0f6a4"} 2025-12-05T10:07:10.728Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "91a2be4d-8e8d-45ea-96d8-0e558840a6da"} 2025-12-05T10:07:16.187Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "a115a09d-63dc-4953-a75e-c3233fbeea75"} 2025-12-05T10:07:21.352Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "7187eda9-88cc-4826-ac61-fa4cdbca30e6"} 2025-12-05T10:07:26.622Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "26232c6d-3556-4c64-8f54-15e3fc4d7763"} 2025-12-05T10:07:31.822Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "172123d5-4155-4fe6-8378-743745beb3f5"} 2025-12-05T10:07:37.123Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "26044f3f-3845-46d5-963b-3c1cf4803021"} 2025-12-05T10:07:42.062Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "88be505a-a3fe-4e62-9175-7b4286e44ad2"} 2025-12-05T10:07:47.809Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": 
"some-name", "reconcileID": "551475cd-7b4c-47de-9a52-cc30444e8205"} 2025-12-05T10:07:53.050Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "7f051201-62c6-40f5-bcd2-8a0a10fae215"} 2025-12-05T10:07:58.062Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "dc957227-c87f-4923-b47c-cb518af45dc2"} 2025-12-05T10:08:03.742Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "5145e504-94d2-4761-8f72-09f826910e1f"} 2025-12-05T10:08:08.959Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "bfa01ca8-c4bb-429a-90aa-2a0a860f8eb9"} 2025-12-05T10:08:14.333Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "d46197f9-8932-4c2c-9f24-c2aa95b86dda"} 2025-12-05T10:08:19.646Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "2d14377b-4b5c-4c2a-a900-e0a8dc6404f1"} 2025-12-05T10:08:24.847Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "e286b7ca-f582-4bcd-9df0-dad59ee9fe30"} 2025-12-05T10:08:30.138Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "447a6af5-a9a1-4077-9649-f1954f578227"} 2025-12-05T10:08:35.416Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "fb26a487-9041-4359-bfdb-b15e2b7ab94d"} 2025-12-05T10:08:40.914Z DEBUG PXC users synced with ProxySQL {"controller": 
"pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "afa33455-7660-4b84-89c7-7606667ad034"} 2025-12-05T10:08:46.043Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "1066e2b6-73f7-475c-a0b6-1c8f7b705003"} 2025-12-05T10:08:51.441Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "1b3fd134-b9ca-41c5-b237-b1c05c00fdd4"} 2025-12-05T10:08:56.531Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "83f15f59-d31f-4109-bf37-f2ec53cd380d"} 2025-12-05T10:09:01.935Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "791aa9cc-aef7-4700-8495-f739ecf50345"} 2025-12-05T10:09:07.050Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "dac7b450-63ae-421c-8691-90bbbb19d485"} 2025-12-05T10:09:12.628Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "914feedc-9c35-469c-8332-c5de4e6cb271"} 2025-12-05T10:09:17.634Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "45606ac8-9427-47ad-be87-3c432f82f56a"} 2025-12-05T10:09:23.121Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "846f685a-554a-4665-b639-c7ab0cfdbc0d"} 2025-12-05T10:09:28.623Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "d321a876-d9f0-4cf5-898b-dbfd115df817"} 2025-12-05T10:09:34.017Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "8909d81a-b914-4bf4-b77a-d44d8acd95da"} 2025-12-05T10:09:39.321Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "997a18bd-b342-4f32-a2ea-a747ad24d275"} 2025-12-05T10:09:44.655Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "3ca3ed7c-9317-4973-9f26-cffc85616e58"} 2025-12-05T10:09:49.845Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "73842e06-5f4e-48ec-b377-44ac3c48a444"} 2025-12-05T10:09:55.177Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "8598a453-2822-4354-a4dd-b7e6834d9c25"} 2025-12-05T10:10:00.333Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "01b94ebf-0556-49a4-8e7c-8c15975bd9c2"} 2025-12-05T10:10:05.521Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "529d0894-055b-4b25-a0aa-f6145b4effd0"} 2025-12-05T10:10:11.120Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "73e4f721-f78a-4540-8729-a52d5dad0fb6"} 2025-12-05T10:10:16.172Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": 
"d01d1fbf-928a-4b4e-8082-66585bdebc82"} 2025-12-05T10:10:21.520Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "63e13538-bd4f-4800-ab57-7b243e72251f"} 2025-12-05T10:10:26.723Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "eb657756-9417-436f-a39c-86021abe678a"} 2025-12-05T10:10:32.356Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "dbcd6e2e-d93c-4a9d-b085-60117fd56b75"} 2025-12-05T10:10:37.424Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "70b8a51f-9184-47e6-9f1b-c1b07e9e3bf2"} 2025-12-05T10:10:42.712Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "37ff5e1d-2f49-4373-9141-d772f058bfca"} 2025-12-05T10:10:43.100Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "366b9ffe-0dc0-4f94-a986-76ddb75131f3", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-05T10:10:43.149Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "366b9ffe-0dc0-4f94-a986-76ddb75131f3", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-05T10:10:45.948Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "366b9ffe-0dc0-4f94-a986-76ddb75131f3"} 2025-12-05T10:13:16.552Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": 
"proxysql-scheduler-17053", "name": "some-name", "reconcileID": "c29eb863-ac0d-4b41-b8bf-d2769b2cad9a"} 2025-12-05T10:13:20.638Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "7ef5c7b0-8cae-471d-b145-f918cf7ed211"} 2025-12-05T10:13:26.516Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "d3732879-f4cd-4f2b-a75a-99fff6c32e3c"} 2025-12-05T10:13:31.814Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "dbb1611c-1a5a-4107-96a1-d4f316c32a14"} 2025-12-05T10:13:37.044Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "c41fccd0-854e-41fb-8f50-620aac4d0bf9"} 2025-12-05T10:13:42.629Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "1adb59dd-fb6a-46df-91d7-dbafe8805ef3"} 2025-12-05T10:13:47.920Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "c1daa63f-cce6-4b37-93da-2e25aff76f98"} 2025-12-05T10:13:53.356Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "2928adda-aaad-4864-be0e-7d0ce5e648e7"} 2025-12-05T10:13:58.620Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "786b3f68-cd81-4991-8c4f-699340273b47"} 2025-12-05T10:14:04.237Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "fcd8fcff-2d19-445d-8f32-8e2600c777a5"} 2025-12-05T10:14:09.536Z DEBUG PXC users synced with 
ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "30dc2118-4592-4e1b-882c-c7b3eb20d087"} 2025-12-05T10:14:14.834Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "e6f0c2dd-3107-4d96-8f48-b1e4ba85cce0"} 2025-12-05T10:14:20.254Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "39fccaa1-f018-4e4f-996e-07142fedf953"} 2025-12-05T10:14:25.724Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "13060015-f46e-4318-b3b7-168970766a27"} 2025-12-05T10:14:31.260Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "cc796013-bf65-4db1-87c0-a3bd0d0cdff7"} 2025-12-05T10:14:36.427Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "d9589360-c805-4665-9520-d07781cd7dd0"} 2025-12-05T10:14:41.464Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "fe6d00c2-191b-4534-8a7a-7086b7c61493"} 2025-12-05T10:14:47.017Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "cc6281d7-90fe-4a9a-a642-2708735ac7c0"} 2025-12-05T10:14:52.323Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "07381d8a-f78c-4ad7-a884-f9de19d655c4"} 2025-12-05T10:14:57.859Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "39a0e1c6-3d34-4ff7-9eb5-c9073fe6b9df"} 2025-12-05T10:15:03.336Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "17d0d163-f73c-41fe-b7e5-5b83a9768ef9"} 2025-12-05T10:15:08.658Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "206bd4c6-700f-4db4-b46f-7485e701dd3a"} 2025-12-05T10:15:14.007Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "e72ecbd5-3429-4440-9bcb-a58ef8f6732e"} 2025-12-05T10:15:19.463Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "6b2ff4e3-a98a-4591-a528-c2400c88b42d"} 2025-12-05T10:15:19.889Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "92bd17b8-2691-47c4-8214-d3821bc5ff72", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-05T10:15:19.980Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "92bd17b8-2691-47c4-8214-d3821bc5ff72", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-12-05T10:15:23.817Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "92bd17b8-2691-47c4-8214-d3821bc5ff72", "error": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-2 does not have a host assigned / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: pod some-name-proxysql-2 does not have a host assigned / / 
\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-05T10:15:51.431Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "7a1bd461-c9a3-4155-86c7-379e880aa792", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:855\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-12-05T10:15:54.483Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "8ac6ccc9-9775-4681-ac00-607a6f22ef63", "err": "get primary pxc pod: not found"} 2025-12-05T10:15:59.246Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "8ac6ccc9-9775-4681-ac00-607a6f22ef63"} 2025-12-05T10:16:03.821Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
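Note: the two sync users failures above look like transient effects of the proxysql scale-out rather than a persistent fault. At 10:15:23 the newly added third replica, some-name-proxysql-2, had no node assigned yet (still Pending), and at 10:15:51 its scheduler had apparently not yet registered the Galera cluster under writer hostgroup 11, so syncusers exited with code 1; by 10:15:59 the periodic resync succeeds again. A hypothetical triage for this state (not commands the harness ran), assuming the operator's usual proxyadmin admin user on port 6032, the password in the cluster's internal secret, and a mysql client inside the proxysql image:
# Check whether the new pod has been scheduled, then ask the ProxySQL admin
# interface which hostgroups are actually populated.
kubectl -n proxysql-scheduler-17053 get pod some-name-proxysql-2 -o wide
kubectl -n proxysql-scheduler-17053 exec some-name-proxysql-0 -c proxysql -- \
  mysql -h127.0.0.1 -P6032 -uproxyadmin -p"$PROXY_ADMIN_PASSWORD" \
  -e 'SELECT hostgroup_id, hostname, status FROM runtime_mysql_servers;'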
{"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "1f5922b5-4450-4999-97f6-2d21ded6cc88"} 2025-12-05T10:16:09.158Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "21f1f0ae-dfe7-4e97-bfee-7cb92aef126d"} 2025-12-05T10:16:14.423Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "ab7f6834-0b14-4acb-8e6c-c1c37db690ae"} 2025-12-05T10:16:20.056Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "510d3727-7f36-4c71-a128-12ea2f616818"} 2025-12-05T10:16:25.424Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "cc4b5a24-0f85-46fa-8583-61ee7a3787a5"} 2025-12-05T10:16:30.860Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "6e7008af-0e68-4f38-a8e2-7bf8d674bbf2"} 2025-12-05T10:16:36.121Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "d1e1e946-2aad-4960-8a38-a9259d397dd8"} 2025-12-05T10:16:41.428Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "2ab2683f-66bd-465d-85bb-f09c5ee30ca7"} 2025-12-05T10:16:47.164Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "bea75c7c-dc00-484d-9bfb-79f14b1778ed"} 2025-12-05T10:16:52.322Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": 
"87eb6c66-d601-43ea-b5e1-bb3df15c2ff8"} 2025-12-05T10:16:57.525Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "2ae695b1-b3dd-47f7-8965-7e1beb2ea79a"} 2025-12-05T10:17:03.253Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "365912c7-5cf1-4159-a01e-1609b68ac122"} 2025-12-05T10:17:08.526Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "eb87929b-50ad-4682-a0b3-a792a5ff1c7d"} 2025-12-05T10:17:14.253Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "f0fc0c16-20e0-4304-adab-cbf47e62861d"} 2025-12-05T10:17:19.528Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "5c432a3a-5544-4c5a-ba37-ee17164eae0c"} 2025-12-05T10:17:24.744Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "1a7b42d6-4d66-44ce-9cdb-a566027d7814"} 2025-12-05T10:17:30.255Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "9ce9169a-6bf3-4268-baea-4e8ec743c291"} 2025-12-05T10:17:35.553Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "aa994a6b-9fe2-4d93-a965-413cb7a62329"} 2025-12-05T10:17:41.045Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "d01e5e21-fb45-4c35-970a-5498f44cde74"} 2025-12-05T10:17:46.261Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": 
"pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-17053"}, "namespace": "proxysql-scheduler-17053", "name": "some-name", "reconcileID": "417653e5-4063-402d-80ad-553f88046da9"} ... // 22 identical fields ... // 2 identical fields ... // 3 identical fields ... // 3 identical fields ... // 3 identical fields ... // 4 identical fields ... // 5 identical fields ... // 5 identical fields ... // 6 identical fields ... // 7 identical fields ... // 8 identical fields ... // 9 identical fields AccessModes: nil, ActiveDeadlineSeconds: nil, Affinity: nil, - Annotations: map[string]string{ + Annotations: map[string]string{ - APIVersion: "apps/v1", - APIVersion: "apps/v1", Args: {"mysqld"}, Args: {"proxysql", "-f", "-c", "/etc/proxysql/proxysql.cnf", ...}, AutomountServiceAccountToken: nil, + AvailableReplicas: 0, - AvailableReplicas: 2, - AvailableReplicas: 3, AWSElasticBlockStore: nil, AzureFile: nil, Capacity: nil, - CollisionCount: &0, + CollisionCount: nil, Conditions: nil, ConfigMap: &v1.ConfigMapVolumeSource{ ContainerPort: 3306, ContainerPort: 33060, ContainerPort: 33062, ContainerPort: 4444, ContainerPort: 4567, ContainerPort: 4568, ContainerPort: 6032, ContainerPort: 6070, Containers: []v1.Container{ + CreationTimestamp: v1.Time{}, - CreationTimestamp: v1.Time{Time: s"2025-12-05 10:02:59 +0000 UTC"}, + CurrentReplicas: 0, - CurrentReplicas: 2, - CurrentReplicas: 3, + CurrentRevision: "", - CurrentRevision: "some-name-proxysql-5dd8f84fb7", - CurrentRevision: "some-name-pxc-857f5f4c7c", DataSource: nil, DataSourceRef: nil, - DefaultMode: &420, - DefaultMode: &420, + DefaultMode: nil, + DefaultMode: nil, DeletionGracePeriodSeconds: nil, DeletionTimestamp: nil, + DeprecatedServiceAccount: "", - DeprecatedServiceAccount: "default", + DNSPolicy: "", - DNSPolicy: "ClusterFirst", EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-proxysql"}, Optional: &true}}}, EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-pxc"}, Optional: &true}}}, Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc"}, {Name: "OPERATOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}}, {Name: "PROXY_ADMIN_USER", Value: "proxyadmin"}, {Name: "PROXY_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "proxyadmin"}}}, ...}, Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc-unready"}, {Name: "MONITOR_HOST", Value: "%"}, {Name: "MYSQL_ROOT_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "root"}}}, {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}}, ...}, EphemeralContainers: nil, FailureThreshold: 3, FC: nil, - FieldsType: "FieldsV1", - FieldsType: "FieldsV1", - FieldsV1: s`{"f:metadata":{"f:annotations":{".":{},"f:percona.com/last-confi`..., - FieldsV1: s`{"f:status":{"f:availableReplicas":{},"f:collisionCount":{},"f:c`..., Finalizers: nil, + Generation: 0, - Generation: 1, github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 GitRepo: nil, /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:857 HostAliases: nil, HostIP: "", HostPort: 0, ImagePullPolicy: "Always", InitContainers: []v1.Container{ InitialDelaySeconds: 300, ISCSI: nil, Items: nil, 
Items: nil, Labels: nil, Lifecycle: nil, LivenessProbe: &v1.Probe{ LocalObjectReference: {Name: "auto-some-name-pxc"}, LocalObjectReference: {Name: "some-name-pxc"}, + ManagedFields: nil, - ManagedFields: []v1.ManagedFieldsEntry{ - Manager: "kube-controller-manager", - Manager: "percona-xtradb-cluster-operator", MinReadySeconds: 0, Name: "auto-config", {Name: "bin", VolumeSource: {EmptyDir: &{}}}, Name: "config", Name: "ist", Name: "mysql", Name: "mysql-admin", Name: "mysql-init-file", Name: "mysql-users-secret-file", Name: "mysqlx", Name: "proxyadm", Namespace: "proxysql-scheduler-17053", Name: "ssl", Name: "ssl-internal", Name: "sst", Name: "stats", {Name: "tmp", VolumeSource: {EmptyDir: &{}}}, Name: "vault-keyring-secret", Name: "write-set", NFS: nil, NodeName: "", NodeSelector: nil, ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc"}}, ObjectMeta: {Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "pxc", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc"}}, ObjectMeta: {Name: "datadir", Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, ObjectMeta: {Name: "proxydata", Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, ObjectMeta: v1.ObjectMeta{ + ObservedGeneration: 0, - ObservedGeneration: 1, - Operation: "Update", - Operation: "Update", Optional: &false, Optional: &true, Optional: &true, Ordinals: nil, OS: nil, Overhead: nil, OwnerReferences: {{APIVersion: "pxc.percona.com/v1", Kind: "PerconaXtraDBCluster", Name: "some-name", UID: "e68c793d-00aa-4b78-a2f1-47d0e2cbc82f", ...}}, - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., - "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJl"..., + "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., + 
"percona.com/last-config-hash": "eyJyZXBsaWNhcyI6NSwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJl"..., + PeriodSeconds: 0, - PeriodSeconds: 10, + PersistentVolumeClaimRetentionPolicy: nil, - PersistentVolumeClaimRetentionPolicy: s"&StatefulSetPersistentVolumeClaimRetentionPolicy{WhenDeleted:Retain,WhenScaled:Retain,}", + Phase: "", - Phase: "Pending", + PodManagementPolicy: "", - PodManagementPolicy: "OrderedReady", Ports: []v1.ContainerPort{ PreemptionPolicy: nil, ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/liveness-check.sh"}}}, + Protocol: "", - Protocol: "TCP", Quobyte: nil, ReadinessProbe: &{ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/readiness-check.sh"}}}, InitialDelaySeconds: 15, TimeoutSeconds: 15, PeriodSeconds: 30, ...}, + ReadyReplicas: 0, - ReadyReplicas: 2, - ReadyReplicas: 3, + Replicas: 0, - Replicas: 2, - Replicas: &2, - Replicas: 3, - Replicas: &3, + Replicas: &3, + Replicas: &5, + ResourceVersion: "", - ResourceVersion: "1764929017736127008", - ResourceVersion: "1764929196923247017", + RestartPolicy: "", - RestartPolicy: "Always", - RevisionHistoryLimit: &10, + RevisionHistoryLimit: nil, + SchedulerName: "", - SchedulerName: "default-scheduler", SecretName: "internal-some-name", SecretName: "some-name-mysql-init", SecretName: "some-name-ssl", SecretName: "some-name-ssl-internal", SecretName: "some-name-vault", Secret: &v1.SecretVolumeSource{ SecurityContext: nil, Selector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, Selector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}}, SelfLink: "", ServiceAccountName: "default", ServiceName: "some-name-proxysql-unready", ServiceName: "some-name-pxc", SetHostnameAsFQDN: nil, Spec: v1.PersistentVolumeClaimSpec{ Spec: v1.PodSpec{ Spec: v1.StatefulSetSpec{ StartupProbe: nil, Status: v1.PersistentVolumeClaimStatus{ Status: v1.StatefulSetStatus{ StorageClassName: nil, Subdomain: "", - Subresource: "status", SuccessThreshold: 1, Template: v1.PodTemplateSpec{ TerminationGracePeriodSeconds: &30, TerminationGracePeriodSeconds: &600, TerminationGracePeriodSeconds: nil, + TerminationMessagePath: "", - TerminationMessagePath: "/dev/termination-log", + TerminationMessagePolicy: "", - TerminationMessagePolicy: "File", TimeoutSeconds: 5, - Time: s"2025-12-05 10:02:59 +0000 UTC", - Time: s"2025-12-05 10:03:37 +0000 UTC", - Time: s"2025-12-05 10:06:36 +0000 UTC", Tolerations: nil, - TopologySpreadConstraints: nil, + TopologySpreadConstraints: []v1.TopologySpreadConstraint{}, TypeMeta: {}, TypeMeta: {Kind: "StatefulSet", APIVersion: "apps/v1"}, + UID: "", - UID: "13cf915d-e794-4805-9538-732ef7333794", - UID: "6e532e4d-9aec-4cbe-8ced-adfad4070d0d", + UpdatedReplicas: 0, - UpdatedReplicas: 2, - UpdatedReplicas: 3, + UpdateRevision: "", - UpdateRevision: "some-name-proxysql-5dd8f84fb7", - UpdateRevision: "some-name-pxc-857f5f4c7c", UpdateStrategy: {Type: "RollingUpdate", RollingUpdate: &{Partition: &0}}, &v1.StatefulSet{ VolumeAttributesClassName: nil, VolumeClaimTemplates: []v1.PersistentVolumeClaim{ 
+ xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ kubectl get pxc --all-namespaces -o wide
+ grep -v NAMESPACE
+ kubectl patch pxc -n proxysql-scheduler-17053 some-name --type=merge -p '{"metadata":{"finalizers":[]}}'
perconaxtradbcluster.pxc.percona.com/some-name patched
+ kubectl_bin delete pxc --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.WEnJxxaBIN
++ mktemp
+ local LAST_ERR=/tmp/tmp.RZTs0OyH84
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.WEnJxxaBIN
perconaxtradbcluster.pxc.percona.com "some-name" deleted from proxysql-scheduler-17053 namespace
+ cat /tmp/tmp.RZTs0OyH84
+ rm /tmp/tmp.WEnJxxaBIN /tmp/tmp.RZTs0OyH84
+ return 0
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.yLV3tvA2Of
++ mktemp
+ local LAST_ERR=/tmp/tmp.FNN06Sd6KF
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.yLV3tvA2Of
No resources found
+ cat /tmp/tmp.FNN06Sd6KF
+ rm /tmp/tmp.yLV3tvA2Of /tmp/tmp.FNN06Sd6KF
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.zGBCXHFtT4
++ mktemp
+ local LAST_ERR=/tmp/tmp.mXtBtqTvPk
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.zGBCXHFtT4
No resources found
+ cat /tmp/tmp.mXtBtqTvPk
+ rm /tmp/tmp.zGBCXHFtT4 /tmp/tmp.mXtBtqTvPk
+ return 0
+ kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
++ mktemp
+ local LAST_OUT=/tmp/tmp.sqX0ednaaj
++ mktemp
+ local LAST_ERR=/tmp/tmp.7MoV2ACirW
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.sqX0ednaaj
validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted
+ cat /tmp/tmp.7MoV2ACirW
+ rm /tmp/tmp.sqX0ednaaj /tmp/tmp.7MoV2ACirW
+ return 0
+ kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml
+ :
+ '[' '!' -z '' ']'
+ '[' -n pxc-operator ']'
+ rm -rf /tmp/tmp.m3LeJ6naj1
+ kubectl_bin delete --grace-period=0 --force=true namespace proxysql-scheduler-17053
++ mktemp
+ local LAST_OUT=/tmp/tmp.aWHbZVB32v
++ mktemp
+ local LAST_ERR=/tmp/tmp.boey60J7xx
+ local exit_status=0
++ seq 0 2
+ kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace proxysql-scheduler-17053
++ mktemp
+ local LAST_OUT=/tmp/tmp.mWOIOeEGR1
++ mktemp
+ local LAST_ERR=/tmp/tmp.0DCYUyZNv8
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace pxc-operator
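Note: every kubectl_bin call traced above follows the same shape: two mktemp files, up to three attempts, break on the first success, then cat the captured stdout/stderr and clean up. A minimal reconstruction of such a wrapper (the function and variable names appear in the trace; the redirections and back-off are assumptions, and the harness's real helper may differ):
kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status i
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    exit_status=0
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"   # assumed redirection; the trace only shows the files being cat'ed
        exit_status=$?
        set -e
        if [ "$exit_status" != 0 ]; then
            sleep 1   # assumed back-off between attempts; the trace only shows the success path
            continue
        fi
        break
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR" >&2
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}
The finalizer-stripping xargs/patch step that opens the cleanup presumably exists so the bulk kubectl delete pxc is not left hanging on the custom resources' finalizers while the operator that would process them is being torn down at the same time.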