Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/logs/proxysql-scheduler-8-0.log Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.34) and server (1.31) exceeds the supported minor version skew of +/-1 + cluster=some-name + create_infra proxysql-scheduler-31142 + local ns=proxysql-scheduler-31142 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n proxysql-scheduler-31107 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.bI0U4zpuxe ++ mktemp + local LAST_ERR=/tmp/tmp.KfTw3B0021 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.bI0U4zpuxe perconaxtradbcluster.pxc.percona.com "some-name" deleted from proxysql-scheduler-31107 namespace + cat /tmp/tmp.KfTw3B0021 + rm /tmp/tmp.bI0U4zpuxe /tmp/tmp.KfTw3B0021 + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.mh5zYVl5Ph ++ mktemp + local LAST_ERR=/tmp/tmp.5YxfbRc3nN + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.mh5zYVl5Ph No resources found + cat /tmp/tmp.5YxfbRc3nN + rm /tmp/tmp.mh5zYVl5Ph /tmp/tmp.5YxfbRc3nN + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.nBcGlJ0qo7 ++ mktemp + local LAST_ERR=/tmp/tmp.epLMEh67Nd + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.nBcGlJ0qo7 No resources found + cat /tmp/tmp.epLMEh67Nd + rm /tmp/tmp.nBcGlJ0qo7 /tmp/tmp.epLMEh67Nd + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ awk '{print $1}' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' ++ grep validate-auth + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ grep chaos-mesh ++ kubectl api-resources ++ grep chaos-mesh.org ++ kubectl get crd ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ grep chaos-mesh ++ kubectl get clusterrolebinding + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ kubectl get clusterrole ++ 
grep chaos-mesh + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- + awk '{print$1}' cleaned up old namespaces pxc-operator++ mktemp ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.RcA3MnBK5d ++ mktemp + local LAST_OUT=/tmp/tmp.cMiyqBfaGO + local LAST_ERR=/tmp/tmp.bVrssGewsq + local exit_status=0 ++ mktemp + xargs kubectl delete ns + local LAST_ERR=/tmp/tmp.LfumaPu0iT + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.cMiyqBfaGO + cat /tmp/tmp.LfumaPu0iT + rm /tmp/tmp.cMiyqBfaGO /tmp/tmp.LfumaPu0iT + return 0 namespace "proxysql-scheduler-31107" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.RcA3MnBK5d namespace "pxc-operator" deleted + cat /tmp/tmp.bVrssGewsq + rm /tmp/tmp.RcA3MnBK5d /tmp/tmp.bVrssGewsq + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'waiting for namespace/pxc-operator to be deleted' waiting for namespace/pxc-operator to be deleted+ set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.XVVuEPHwNr ++ mktemp + local LAST_ERR=/tmp/tmp.ZL9d6NsrZc + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.XVVuEPHwNr namespace/pxc-operator created + cat /tmp/tmp.ZL9d6NsrZc + rm /tmp/tmp.XVVuEPHwNr /tmp/tmp.ZL9d6NsrZc + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.A15vLqBLDZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.npH5CUGa4Y ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.A15vLqBLDZ ++ cat /tmp/tmp.npH5CUGa4Y ++ rm /tmp/tmp.A15vLqBLDZ /tmp/tmp.npH5CUGa4Y ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2270-21635d9b-9-cluster9 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.K5vdDnzojg ++ mktemp + local LAST_ERR=/tmp/tmp.biIWHlWKRg + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2270-21635d9b-9-cluster9 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.K5vdDnzojg Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2270-21635d9b-9-cluster9" 
modified. + cat /tmp/tmp.biIWHlWKRg + rm /tmp/tmp.K5vdDnzojg /tmp/tmp.biIWHlWKRg + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.Oph3t5eqDG ++ mktemp + local LAST_ERR=/tmp/tmp.t5s4EaGOFe + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Oph3t5eqDG customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.t5s4EaGOFe + rm /tmp/tmp.Oph3t5eqDG /tmp/tmp.t5s4EaGOFe + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + sed -e 's^namespace: .*^namespace: pxc-operator^' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/deploy/cw-rbac.yaml + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.fb49CG5oOt ++ mktemp + local LAST_ERR=/tmp/tmp.OYOAaEyusy + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.fb49CG5oOt clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.OYOAaEyusy + rm /tmp/tmp.fb49CG5oOt /tmp/tmp.OYOAaEyusy + return 0 + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/deploy/cw-operator.yaml + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2270-21635d9b^' ++ mktemp + local LAST_OUT=/tmp/tmp.G12KeKdzjo ++ mktemp + local LAST_ERR=/tmp/tmp.WvnfJsjKyY + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.G12KeKdzjo deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.WvnfJsjKyY + rm /tmp/tmp.G12KeKdzjo /tmp/tmp.WvnfJsjKyY + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.jMj7pQgQQz ++ mktemp + local LAST_ERR=/tmp/tmp.o1XgesXWef + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait 
--for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jMj7pQgQQz pod/percona-xtradb-cluster-operator-5df89bfbd-2mxk8 condition met + cat /tmp/tmp.o1XgesXWef + rm /tmp/tmp.jMj7pQgQQz /tmp/tmp.o1XgesXWef + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.1Od4ui21o0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.KNd91VyOcZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1Od4ui21o0 ++ cat /tmp/tmp.KNd91VyOcZ ++ rm /tmp/tmp.1Od4ui21o0 /tmp/tmp.KNd91VyOcZ ++ return 0 + wait_pod percona-xtradb-cluster-operator-5df89bfbd-2mxk8 480 pxc-operator + local pod=percona-xtradb-cluster-operator-5df89bfbd-2mxk8 + local max_retry=480 + local ns=pxc-operator ++ grep -E '^(pxc|proxysql)$' ++ echo percona-xtradb-cluster-operator-5df89bfbd-2mxk8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-5df89bfbd-2mxk8 condition met waiting for pod/percona-xtradb-cluster-operator-5df89bfbd-2mxk8 to become Ready.Ok + sleep 3 + create_namespace proxysql-scheduler-31142 + local namespace=proxysql-scheduler-31142 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// ++ tail -n1 ++ helm list --all-namespaces --filter chaos-mesh + local chaos_mesh_ns= + '[' -n '' ']' ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get MutatingWebhookConfiguration + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep chaos-mesh ++ kubectl api-resources ++ awk '{print $1}' ++ grep chaos-mesh.org ++ kubectl get crd ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces 
----------------------------------------------------------------------------------- + '[' -n '' ']' + desc 'cleaned up old namespaces proxysql-scheduler-31142' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces proxysql-scheduler-31142 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace proxysql-scheduler-31142 + awk '{print$1}' + grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + xargs kubectl delete ns ++ mktemp + local LAST_OUT=/tmp/tmp.gNwGrOa7qN + kubectl_bin get ns ++ mktemp + local LAST_ERR=/tmp/tmp.OyvCjJwEiv + local exit_status=0 ++ mktemp + local LAST_OUT=/tmp/tmp.zPt2Spx6y5 ++ seq 0 2 ++ mktemp + local LAST_ERR=/tmp/tmp.L7T1RwBIRx + local exit_status=0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace proxysql-scheduler-31142 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace proxysql-scheduler-31142 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.zPt2Spx6y5 + cat /tmp/tmp.L7T1RwBIRx + rm /tmp/tmp.zPt2Spx6y5 /tmp/tmp.L7T1RwBIRx + return 0 error: resource(s) were provided, but no name was specified + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace proxysql-scheduler-31142 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.gNwGrOa7qN + cat /tmp/tmp.OyvCjJwEiv Error from server (NotFound): namespaces "proxysql-scheduler-31142" not found + rm /tmp/tmp.gNwGrOa7qN /tmp/tmp.OyvCjJwEiv + return 1 + : + wait_for_delete namespace/proxysql-scheduler-31142 + local res=namespace/proxysql-scheduler-31142 + echo -n 'waiting for namespace/proxysql-scheduler-31142 to be deleted' waiting for namespace/proxysql-scheduler-31142 to be deleted+ set +o xtrace Error from server (NotFound): namespaces "proxysql-scheduler-31142" not found + desc 'create namespace proxysql-scheduler-31142' + set +o xtrace ----------------------------------------------------------------------------------- create namespace proxysql-scheduler-31142 ----------------------------------------------------------------------------------- + kubectl_bin create namespace proxysql-scheduler-31142 ++ mktemp + local LAST_OUT=/tmp/tmp.I1PQbXe5Y3 ++ mktemp + local LAST_ERR=/tmp/tmp.UAvqt4lkN9 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace proxysql-scheduler-31142 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.I1PQbXe5Y3 namespace/proxysql-scheduler-31142 created + cat /tmp/tmp.UAvqt4lkN9 + rm /tmp/tmp.I1PQbXe5Y3 /tmp/tmp.UAvqt4lkN9 + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.3CGNT1WSGY +++ mktemp ++ local LAST_ERR=/tmp/tmp.YMabikpRlT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3CGNT1WSGY ++ cat /tmp/tmp.YMabikpRlT ++ rm /tmp/tmp.3CGNT1WSGY /tmp/tmp.YMabikpRlT ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2270-21635d9b-9-cluster9 --namespace=proxysql-scheduler-31142 ++ mktemp + local LAST_OUT=/tmp/tmp.1bUX3VcdPp ++ mktemp + local LAST_ERR=/tmp/tmp.nXlRh4ZRsn + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' 
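
The capture-and-retry scaffolding visible on both sides of this point (a mktemp pair for LAST_OUT/LAST_ERR, three attempts via seq 0 2, set +e around the call, then cat and rm of the temp files) wraps every kubectl invocation in this log. A minimal reconstruction of the kubectl_bin wrapper from the trace follows; the fixed 'sleep 0' between attempts is exactly what the trace shows, while routing LAST_ERR back to stderr is an assumption:

# Sketch of the kubectl_bin wrapper, reconstructed from this xtrace output.
kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0 i
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do                  # up to three attempts
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        if [ "$exit_status" != 0 ]; then
            sleep 0                          # the trace shows a literal "sleep 0" before each retry
        else
            break
        fi
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR" >&2                      # assumption: captured stderr is replayed on stderr
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}

Callers that expect failures append '|| :', which is why an error such as 'error: resource(s) were provided, but no name was specified' or a final 'return 1' is immediately followed by '+ :' and the run continues.
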
+ set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2270-21635d9b-9-cluster9 --namespace=proxysql-scheduler-31142 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1bUX3VcdPp Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2270-21635d9b-9-cluster9" modified. + cat /tmp/tmp.nXlRh4ZRsn + rm /tmp/tmp.1bUX3VcdPp /tmp/tmp.nXlRh4ZRsn + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.ziPYQtYB9Z ++ mktemp + local LAST_ERR=/tmp/tmp.Lg5VxzRXkU + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ziPYQtYB9Z secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.Lg5VxzRXkU + rm /tmp/tmp.ziPYQtYB9Z /tmp/tmp.Lg5VxzRXkU + return 0 + desc 'create PXC cluster: some-name' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster: some-name ----------------------------------------------------------------------------------- + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.mQxRZXiUP8 ++ mktemp + local LAST_ERR=/tmp/tmp.nAoB3a6XEX + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.mQxRZXiUP8 secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.nAoB3a6XEX + rm /tmp/tmp.mQxRZXiUP8 /tmp/tmp.nAoB3a6XEX + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/conf/client.yml + local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/conf/client.yml + local pvc_name= + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/conf/client.yml '' + local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/conf/client.yml + local pvc_name= + kubectl_bin 
apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/conf/client.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_OUT=/tmp/tmp.zSuEpPsS1D + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2270-21635d9b#' + /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' ++ mktemp + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + local LAST_ERR=/tmp/tmp.FxRGXUrd7E + local exit_status=0 + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' ++ seq 0 2 + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.proxysql-scheduler-31142~ + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.zSuEpPsS1D deployment.apps/pxc-client created + cat /tmp/tmp.FxRGXUrd7E + rm /tmp/tmp.zSuEpPsS1D /tmp/tmp.FxRGXUrd7E + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/conf/some-name.yml + local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/conf/some-name.yml + local pvc_name= + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/conf/some-name.yml '' + local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/conf/some-name.yml + local pvc_name= + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + local LAST_OUT=/tmp/tmp.q9mAau7Q02 + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2270-21635d9b#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.proxysql-scheduler-31142~ + /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' ++ mktemp + local LAST_ERR=/tmp/tmp.IBhby6iRvE + 
local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.q9mAau7Q02 perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.IBhby6iRvE + rm /tmp/tmp.q9mAau7Q02 /tmp/tmp.IBhby6iRvE + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.W98itYlqUy ++++ mktemp +++ local LAST_ERR=/tmp/tmp.30ZzJ25GeE +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.W98itYlqUy +++ cat /tmp/tmp.30ZzJ25GeE +++ rm /tmp/tmp.W98itYlqUy /tmp/tmp.30ZzJ25GeE +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.7mYCNQ5Euv ++++ mktemp +++ local LAST_ERR=/tmp/tmp.ZTUxnIR35s +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.7mYCNQ5Euv +++ cat /tmp/tmp.ZTUxnIR35s +++ rm /tmp/tmp.7mYCNQ5Euv /tmp/tmp.ZTUxnIR35s +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n proxysql-scheduler-31142 ++ mktemp + local LAST_OUT=/tmp/tmp.NdwPlbUDpc ++ mktemp + local LAST_ERR=/tmp/tmp.ZF5OEzHM4j + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n proxysql-scheduler-31142 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n proxysql-scheduler-31142 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n proxysql-scheduler-31142 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.NdwPlbUDpc + cat /tmp/tmp.ZF5OEzHM4j error: no matching resources found + rm /tmp/tmp.NdwPlbUDpc /tmp/tmp.ZF5OEzHM4j + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= 
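
wait_pod, entered here for some-name-proxysql-0, derives the container to watch from the pod name alone: the sed/grep pair on the next lines maps a trailing -pxc-N or -proxysql-N suffix to the container name and yields an empty string for anything else (operator and client pods). A sketch follows; the readiness poll itself runs under set +o xtrace and only surfaces as 'condition met waiting for ... to become Ready.Ok', so that part of the body is an assumption:

# Sketch of wait_pod; only the container derivation is taken verbatim
# from the trace, the untraced body is reconstructed from its output.
wait_pod() {
    local pod=$1
    local max_retry=${2:-480}
    local ns=$3
    # Map "...-pxc-0" -> "pxc" and "...-proxysql-0" -> "proxysql";
    # anything else (operator pod, pxc-client) leaves container empty.
    local container
    container=$(echo "$pod" \
        | /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' \
        | grep -E '^(pxc|proxysql)$' || true)
    set +o xtrace
    # Assumption: the untraced body waits on the Ready condition
    # (consulting $container's status when set), printing a dot per
    # attempt up to $max_retry; only its output appears in the log.
    kubectl ${ns:+-n "$ns"} wait --for=condition=Ready "pod/$pod"
    echo "waiting for pod/$pod to become Ready.Ok"
    set -o xtrace
}
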
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' ++ echo some-name-proxysql-0 + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met waiting for pod/some-name-proxysql-0 to become Ready.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' ++ echo some-name-pxc-0 + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met waiting for pod/some-name-pxc-0 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met waiting for pod/some-name-pxc-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met waiting for pod/some-name-pxc-2 to become Ready.Ok + sleep 10 ++ kubectl get pxc some-name -o 'jsonpath={.spec.secretsName}' + local secret_name=my-cluster-secrets ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.A4eTImqBWf +++ mktemp ++ local LAST_ERR=/tmp/tmp.6ByCTKcEB4 ++ local exit_status=0 +++ seq 0 2 ++ base64 --decode ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.A4eTImqBWf ++ cat /tmp/tmp.6ByCTKcEB4 ++ rm /tmp/tmp.A4eTImqBWf /tmp/tmp.6ByCTKcEB4 ++ return 0 + local root_pass=root_password + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -p'\''root_password'\'' -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5B8pE5c0Ky +++ mktemp ++ local LAST_ERR=/tmp/tmp.Kcgk3yk38o ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5B8pE5c0Ky ++ cat /tmp/tmp.Kcgk3yk38o ++ rm 
/tmp/tmp.5B8pE5c0Ky /tmp/tmp.Kcgk3yk38o ++ return 0 + client_pod=pxc-client-59944c5bbf-p6zcf + wait_pod pxc-client-59944c5bbf-p6zcf + local pod=pxc-client-59944c5bbf-p6zcf + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-p6zcf ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-p6zcf condition met waiting for pod/pxc-client-59944c5bbf-p6zcf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -p'\''root_password'\'' -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cRntlnzsky +++ mktemp ++ local LAST_ERR=/tmp/tmp.4T7laSQdUp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cRntlnzsky ++ cat /tmp/tmp.4T7laSQdUp ++ rm /tmp/tmp.cRntlnzsky /tmp/tmp.4T7laSQdUp ++ return 0 + client_pod=pxc-client-59944c5bbf-p6zcf + wait_pod pxc-client-59944c5bbf-p6zcf + local pod=pxc-client-59944c5bbf-p6zcf + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-p6zcf ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-p6zcf condition met waiting for pod/pxc-client-59944c5bbf-p6zcf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pJ8oRs4zy3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.F52g3ZTMRY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pJ8oRs4zy3 ++ cat /tmp/tmp.F52g3ZTMRY ++ rm /tmp/tmp.pJ8oRs4zy3 /tmp/tmp.F52g3ZTMRY ++ return 0 + client_pod=pxc-client-59944c5bbf-p6zcf + wait_pod pxc-client-59944c5bbf-p6zcf + local pod=pxc-client-59944c5bbf-p6zcf + local max_retry=480 + local ns= 
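
Every data check from here on repeats one shape: compare_mysql_cmd resolves an expected-result fixture, preferring a version-suffixed variant (select-1-80.sql and similar) when the PXC image matches and the file exists, runs the query through run_mysql (a kubectl exec into the pxc-client pod, suppressed here by set +o xtrace), and diffs the captured output. A sketch, where test_dir, tmp_dir and IMAGE_PXC stand in for values the log only shows as literal paths and image tags:

# Sketch of compare_mysql_cmd; fixture fallback and diff are taken from
# the trace, the run_mysql exec body is hidden by set +o xtrace.
compare_mysql_cmd() {
    local command_id=$1 command=$2 uri=$3
    local expected_result=$test_dir/compare/${command_id}.sql

    # Prefer a version-specific fixture when one exists for this image.
    if [[ $IMAGE_PXC =~ 8\.0 ]] && [ -f "$test_dir/compare/${command_id}-80.sql" ]; then
        expected_result=$test_dir/compare/${command_id}-80.sql
    fi

    run_mysql "$command" "$uri" >"$tmp_dir/${command_id}.sql"
    [ -s "$tmp_dir/${command_id}.sql" ]                      # fail on empty output
    diff -u "$expected_result" "$tmp_dir/${command_id}.sql"  # fail on any mismatch
}

In this run select-1-80.sql does not exist, so each of the three pxc nodes is checked against the generic select-1.sql fixture.
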
++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo pxc-client-59944c5bbf-p6zcf + local container= + set +o xtrace pod/pxc-client-59944c5bbf-p6zcf condition met waiting for pod/pxc-client-59944c5bbf-p6zcf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' -s /tmp/tmp.623IQJtkKw/select-1.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/select-1.sql /tmp/tmp.623IQJtkKw/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vNT9pJv6GX +++ mktemp ++ local LAST_ERR=/tmp/tmp.clbSBF5vlP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vNT9pJv6GX ++ cat /tmp/tmp.clbSBF5vlP ++ rm /tmp/tmp.vNT9pJv6GX /tmp/tmp.clbSBF5vlP ++ return 0 + client_pod=pxc-client-59944c5bbf-p6zcf + wait_pod pxc-client-59944c5bbf-p6zcf + local pod=pxc-client-59944c5bbf-p6zcf + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-p6zcf ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-p6zcf condition met waiting for pod/pxc-client-59944c5bbf-p6zcf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.623IQJtkKw/select-1.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/select-1.sql /tmp/tmp.623IQJtkKw/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.agoXu00fMG +++ mktemp ++ local LAST_ERR=/tmp/tmp.OAIu5Zp4Cw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.agoXu00fMG ++ cat /tmp/tmp.OAIu5Zp4Cw ++ rm /tmp/tmp.agoXu00fMG /tmp/tmp.OAIu5Zp4Cw ++ return 0 + client_pod=pxc-client-59944c5bbf-p6zcf + wait_pod pxc-client-59944c5bbf-p6zcf + local pod=pxc-client-59944c5bbf-p6zcf + local max_retry=480 + local ns= ++ echo pxc-client-59944c5bbf-p6zcf ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-59944c5bbf-p6zcf condition met waiting for pod/pxc-client-59944c5bbf-p6zcf to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.623IQJtkKw/select-1.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/select-1.sql /tmp/tmp.623IQJtkKw/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ grep -E -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LofXCiiyk7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ryMCV77dj9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LofXCiiyk7 ++ cat /tmp/tmp.ryMCV77dj9 Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.LofXCiiyk7 /tmp/tmp.ryMCV77dj9 ++ return 0 + '[' '' ']' + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/some-name to be ready' waiting for pxc/some-name to be ready++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KBQ0qDuUZC +++ mktemp ++ local LAST_ERR=/tmp/tmp.RJ5PInruJa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KBQ0qDuUZC ++ cat /tmp/tmp.RJ5PInruJa ++ rm /tmp/tmp.KBQ0qDuUZC /tmp/tmp.RJ5PInruJa ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zRllHO5FRA +++ mktemp ++ local LAST_ERR=/tmp/tmp.8Dx9FLTe0f ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zRllHO5FRA ++ cat /tmp/tmp.8Dx9FLTe0f ++ rm /tmp/tmp.zRllHO5FRA /tmp/tmp.8Dx9FLTe0f ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Wjmeg92HQJ ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.QLE0CnkPVs +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Wjmeg92HQJ +++++ cat /tmp/tmp.QLE0CnkPVs +++++ rm /tmp/tmp.Wjmeg92HQJ /tmp/tmp.QLE0CnkPVs +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.9PeuypJzDL ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.yIJ61TuL7P +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.9PeuypJzDL +++++ cat /tmp/tmp.yIJ61TuL7P +++++ rm /tmp/tmp.9PeuypJzDL 
/tmp/tmp.yIJ61TuL7P +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PATPYTUTJQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.2XTDbLxHiT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PATPYTUTJQ ++ cat /tmp/tmp.2XTDbLxHiT ++ rm /tmp/tmp.PATPYTUTJQ /tmp/tmp.2XTDbLxHiT ++ return 0 + [[ 2 == \2 ]] + echo + desc 'check if service and statefulset created with expected config' + set +o xtrace ----------------------------------------------------------------------------------- check if service and statefulset created with expected config ----------------------------------------------------------------------------------- + compare_kubectl statefulset/some-name-pxc + local resource=statefulset/some-name-pxc + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc.yml + local new_result=/tmp/tmp.623IQJtkKw/statefulset_some-name-pxc.yml + desc 'compare statefulset/some-name-pxc-' + set +o xtrace ----------------------------------------------------------------------------------- compare statefulset/some-name-pxc- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 ++ echo '1.31 >= 1.33' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-k129.yml ']' + version_gt 1.27 ++ bc -l ++ echo '1.31 >= 1.27' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-k127.yml ']' + version_gt 1.24 ++ bc -l ++ echo '1.31 >= 1.24' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-k124.yml ']' + version_gt 1.22 ++ bc -l ++ echo '1.31 >= 1.22' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-k122.yml ']' + version_gt 1.21 ++ bc -l ++ echo '1.31 >= 1.21' + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-oc.yml ']' + version_gt 1.29 ++ bc -l ++ echo '1.31 >= 1.29' + '[' 1 -eq 1 ']' + return 0 + '[' '!' 
-z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc-aks.yml ']' + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. 
| select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("proxysql-scheduler-31142", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + kubectl_bin get -o yaml statefulset/some-name-pxc ++ mktemp + local LAST_OUT=/tmp/tmp.BpIMfRHDeS ++ mktemp + local LAST_ERR=/tmp/tmp.6LqZsLQY0B + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml statefulset/some-name-pxc + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BpIMfRHDeS + cat /tmp/tmp.6LqZsLQY0B + rm /tmp/tmp.BpIMfRHDeS /tmp/tmp.6LqZsLQY0B + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-pxc.yml /tmp/tmp.623IQJtkKw/statefulset_some-name-pxc.yml + log 'compare_kubectl: statefulset/some-name-pxc OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-11-28T12:41:55+0000]' compare_kubectl: statefulset/some-name-pxc OK [2025-11-28T12:41:55+0000] compare_kubectl: statefulset/some-name-pxc OK + compare_kubectl statefulset/some-name-proxysql + local resource=statefulset/some-name-proxysql + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql.yml + local new_result=/tmp/tmp.623IQJtkKw/statefulset_some-name-proxysql.yml + desc 'compare statefulset/some-name-proxysql-' + set +o xtrace ----------------------------------------------------------------------------------- compare statefulset/some-name-proxysql- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 ++ echo '1.31 >= 1.33' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-k129.yml ']' + version_gt 1.27 ++ echo '1.31 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-k127.yml ']' + version_gt 1.24 ++ echo '1.31 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-k124.yml ']' + version_gt 1.22 ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f 
/mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-k122.yml ']' + version_gt 1.21 ++ echo '1.31 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-oc.yml ']' + version_gt 1.29 ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql-aks.yml ']' + kubectl_bin get -o yaml statefulset/some-name-proxysql ++ mktemp + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. 
| select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("proxysql-scheduler-31142", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + local LAST_OUT=/tmp/tmp.sZ2uFzGm1r ++ mktemp + local LAST_ERR=/tmp/tmp.ygWC2v9qNd + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml statefulset/some-name-proxysql + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.sZ2uFzGm1r + cat /tmp/tmp.ygWC2v9qNd + rm /tmp/tmp.sZ2uFzGm1r /tmp/tmp.ygWC2v9qNd + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/statefulset_some-name-proxysql.yml /tmp/tmp.623IQJtkKw/statefulset_some-name-proxysql.yml + log 'compare_kubectl: statefulset/some-name-proxysql OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-11-28T12:41:57+0000]' compare_kubectl: statefulset/some-name-proxysql OK [2025-11-28T12:41:57+0000] compare_kubectl: statefulset/some-name-proxysql OK + compare_kubectl service/some-name-pxc + local resource=service/some-name-pxc + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc.yml + local new_result=/tmp/tmp.623IQJtkKw/service_some-name-pxc.yml + desc 'compare service/some-name-pxc-' + set +o xtrace ----------------------------------------------------------------------------------- compare service/some-name-pxc- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 ++ echo '1.31 >= 1.33' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-k129.yml ']' + version_gt 1.27 ++ echo 
'1.31 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-k127.yml ']' + version_gt 1.24 ++ echo '1.31 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-k124.yml ']' + version_gt 1.22 ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-k122.yml ']' + version_gt 1.21 ++ echo '1.31 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-oc.yml ']' + version_gt 1.29 ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc-aks.yml ']' + kubectl_bin get -o yaml service/some-name-pxc ++ mktemp + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. 
| select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("proxysql-scheduler-31142", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. 
| select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + local LAST_OUT=/tmp/tmp.LqisR9hQTZ ++ mktemp + local LAST_ERR=/tmp/tmp.Q1yhol6EV7 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml service/some-name-pxc + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LqisR9hQTZ + cat /tmp/tmp.Q1yhol6EV7 + rm /tmp/tmp.LqisR9hQTZ /tmp/tmp.Q1yhol6EV7 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-pxc.yml /tmp/tmp.623IQJtkKw/service_some-name-pxc.yml + log 'compare_kubectl: service/some-name-pxc OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-11-28T12:41:58+0000]' compare_kubectl: service/some-name-pxc OK [2025-11-28T12:41:58+0000] compare_kubectl: service/some-name-pxc OK + compare_kubectl service/some-name-proxysql + local resource=service/some-name-proxysql + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql.yml + local new_result=/tmp/tmp.623IQJtkKw/service_some-name-proxysql.yml + desc 'compare service/some-name-proxysql-' + set +o xtrace ----------------------------------------------------------------------------------- compare service/some-name-proxysql- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 ++ echo '1.31 >= 1.33' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-k129.yml ']' + version_gt 1.27 ++ echo '1.31 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-k127.yml ']' + version_gt 1.24 ++ echo '1.31 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-k124.yml ']' + version_gt 1.22 ++ echo '1.31 >= 1.22' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-k122.yml ']' + version_gt 1.21 ++ echo '1.31 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-oc.yml ']' + version_gt 1.29 ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' 
-z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-aks.yml ']' + kubectl_bin get -o yaml service/some-name-proxysql ++ mktemp + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. | select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. 
| select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("proxysql-scheduler-31142", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - + local LAST_OUT=/tmp/tmp.dxs0zEqkyp ++ mktemp + local LAST_ERR=/tmp/tmp.8FbUlhpxaR + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml service/some-name-proxysql + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dxs0zEqkyp + cat /tmp/tmp.8FbUlhpxaR + rm /tmp/tmp.dxs0zEqkyp /tmp/tmp.8FbUlhpxaR + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql.yml /tmp/tmp.623IQJtkKw/service_some-name-proxysql.yml + log 'compare_kubectl: service/some-name-proxysql OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-11-28T12:41:58+0000]' compare_kubectl: service/some-name-proxysql OK [2025-11-28T12:41:58+0000] compare_kubectl: service/some-name-proxysql OK + compare_kubectl service/some-name-proxysql-unready + local resource=service/some-name-proxysql-unready + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready.yml + local new_result=/tmp/tmp.623IQJtkKw/service_some-name-proxysql-unready.yml + desc 'compare service/some-name-proxysql-unready-' + set +o xtrace ----------------------------------------------------------------------------------- compare service/some-name-proxysql-unready- ----------------------------------------------------------------------------------- + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-eks.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-80.yml ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + version_gt 1.33 ++ echo '1.31 >= 1.33' ++ bc -l + '[' 0 -eq 1 ']' + return 1 + version_gt 1.29 ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-k129.yml ']' + version_gt 1.27 ++ echo '1.31 >= 1.27' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-k127.yml ']' + version_gt 1.24 ++ echo '1.31 >= 1.24' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-k124.yml ']' + version_gt 1.22 ++ echo '1.31 >= 1.22' ++ bc 
-l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-k122.yml ']' + version_gt 1.21 ++ echo '1.31 >= 1.21' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-k121.yml ']' + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-oc.yml ']' + version_gt 1.29 ++ echo '1.31 >= 1.29' ++ bc -l + '[' 1 -eq 1 ']' + return 0 + '[' '!' -z '' -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-k129-oc.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-eks.yml ']' + '[' 0 = 1 -a -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready-aks.yml ']' + kubectl_bin get -o yaml service/some-name-proxysql-unready + yq eval ' del(.metadata.managedFields) | del(.. | select(has("creationTimestamp")).creationTimestamp) | del(.. | select(has("namespace")).namespace) | del(.. | select(has("uid")).uid) | del(.metadata.resourceVersion) | del(.spec.template.spec.containers[].env[] | select(.name == "CLUSTER_HASH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "BACKUP_PATH")) | del(.spec.template.spec.containers[].env[] | select(.name == "S3_BUCKET_URL")) | del(.spec.template.spec.containers[].env[] | select(.name == "AZURE_CONTAINER_NAME")) | del(.metadata.selfLink) | del(.metadata.deletionTimestamp) | del(.metadata.annotations."kubectl.kubernetes.io/last-applied-configuration") | del(.metadata.annotations."kubernetes.io/psp") | del(.metadata.annotations."batch.kubernetes.io/job-tracking") | del(.metadata.labels."batch.kubernetes.io/job-name") | del(.metadata.labels."job-name") | del(.metadata.annotations."cloud.google.com/neg") | del(.metadata.annotations."k8s.v1.cni.cncf.io*") | del(.metadata.annotations."k8s.ovn.org/pod-networks") | del(.spec.template.metadata.annotations."last-applied-secret") | del(.spec.template.metadata.labels."batch.kubernetes.io/job-name") | del(.spec.template.metadata.labels."job-name") | del(.. | select(has("batch.kubernetes.io/controller-uid"))."batch.kubernetes.io/controller-uid") | del(.. | select(has("image")).image) | del(.. | select(has("clusterIP")).clusterIP) | del(.. | select(has("clusterIPs")).clusterIPs) | del(.. | select(has("dataSource")).dataSource) | del(.. | select(has("procMount")).procMount) | del(.. | select(has("storageClassName")).storageClassName) | del(.. | select(has("finalizers")).finalizers) | del(.. | select(has("kubernetes.io/pvc-protection"))."kubernetes.io/pvc-protection") | del(.. | select(has("volumeName")).volumeName) | del(.. | select(has("volume.beta.kubernetes.io/storage-provisioner"))."volume.beta.kubernetes.io/storage-provisioner") | del(.. | select(has("volume.kubernetes.io/storage-provisioner"))."volume.kubernetes.io/storage-provisioner") | del(.spec.volumeMode) | del(.spec.nodeName) | del(.. | select(has("volume.kubernetes.io/selected-node"))."volume.kubernetes.io/selected-node") | del(.. | select(has("percona.com/last-config-hash"))."percona.com/last-config-hash") | del(.. 
| select(has("percona.com/configuration-hash"))."percona.com/configuration-hash") | del(.. | select(has("percona.com/env-secret-config-hash"))."percona.com/env-secret-config-hash") | del(.. | select(has("percona.com/ssl-hash"))."percona.com/ssl-hash") | del(.. | select(has("percona.com/ssl-internal-hash"))."percona.com/ssl-internal-hash") | del(.. | select(has("kubectl.kubernetes.io/default-container"))."kubectl.kubernetes.io/default-container") | del(.spec.volumeClaimTemplates[].spec.volumeMode | select(. == "Filesystem")) | del(.. | select(has("healthCheckNodePort")).healthCheckNodePort) | del(.. | select(has("nodePort")).nodePort) | del(.. | select(has("imagePullSecrets")).imagePullSecrets) | del(.. | select(has("enableServiceLinks")).enableServiceLinks) | del(.status) | del(.spec.volumeClaimTemplates[].apiVersion) | del(.spec.volumeClaimTemplates[].kind) | del(.metadata.ownerReferences[].apiVersion) | del(.. | select(has("controller-uid")).controller-uid) | del(.. | select(has("preemptionPolicy")).preemptionPolicy) | del(.spec.ipFamilies) | del(.spec.ipFamilyPolicy) | (.. | select(. == "policy/v1beta1")) = "policy/v1" | del(.. | select(has("kubernetes.io/hostname"))."kubernetes.io/hostname") | (.. | select(tag == "!!str")) |= sub("proxysql-scheduler-31142", "namespace") | (.. | select(tag == "!!str")) |= sub("kube-api-access-.*", "kube-api-access") | del(.. | select(has("annotations")).annotations | select(length==0)) | del(.spec.crVersion) | del(.. | select(.[] == "percona-xtradb-cluster-operator-workload-token*"))' - ++ mktemp + local LAST_OUT=/tmp/tmp.dobMuDzN0B ++ mktemp + local LAST_ERR=/tmp/tmp.nnX7euFBU1 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get -o yaml service/some-name-proxysql-unready + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dobMuDzN0B + cat /tmp/tmp.nnX7euFBU1 + rm /tmp/tmp.dobMuDzN0B /tmp/tmp.nnX7euFBU1 + return 0 + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/service_some-name-proxysql-unready.yml /tmp/tmp.623IQJtkKw/service_some-name-proxysql-unready.yml + log 'compare_kubectl: service/some-name-proxysql-unready OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-11-28T12:42:00+0000]' compare_kubectl: service/some-name-proxysql-unready OK [2025-11-28T12:42:00+0000] compare_kubectl: service/some-name-proxysql-unready OK + sleep 120 + desc 'check if scheduler is enabled in all ProxySQL servers' + set +o xtrace ----------------------------------------------------------------------------------- check if scheduler is enabled in all ProxySQL servers ----------------------------------------------------------------------------------- + compare_scheduler some-name-proxysql-0 scheduler-0 + local pod=some-name-proxysql-0 + local compare_file=scheduler-0 + compare_mysql_cmd_local scheduler-0 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + local command_id=scheduler-0 + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/scheduler-0.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/scheduler-0-80.sql ']' + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 '' + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local container_name= + set +o xtrace + '[' '!' -s /tmp/tmp.623IQJtkKw/scheduler-0.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/scheduler-0.sql /tmp/tmp.623IQJtkKw/scheduler-0.sql + log 'scheduler is enabled in some-name-proxysql-0: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-11-28T12:44:02+0000]' scheduler is enabled in some-name-proxysql-0: OK [2025-11-28T12:44:02+0000] scheduler is enabled in some-name-proxysql-0: OK + compare_scheduler some-name-proxysql-1 scheduler-1 + local pod=some-name-proxysql-1 + local compare_file=scheduler-1 + compare_mysql_cmd_local scheduler-1 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + local command_id=scheduler-1 + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/scheduler-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/scheduler-1-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 '' + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local container_name= + set +o xtrace + '[' '!' 
-s /tmp/tmp.623IQJtkKw/scheduler-1.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/scheduler-1.sql /tmp/tmp.623IQJtkKw/scheduler-1.sql + log 'scheduler is enabled in some-name-proxysql-1: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-11-28T12:44:04+0000]' scheduler is enabled in some-name-proxysql-1: OK [2025-11-28T12:44:04+0000] scheduler is enabled in some-name-proxysql-1: OK + desc 'check if scheduler is doing its job in all ProxySQL servers' + set +o xtrace ----------------------------------------------------------------------------------- check if scheduler is doing its job in all ProxySQL servers ----------------------------------------------------------------------------------- + compare_mysql_servers some-name-proxysql-0 mysql-servers-0 + local pod=some-name-proxysql-0 + local compare_file=mysql-servers-0 + compare_mysql_cmd_local mysql-servers-0 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + local command_id=mysql-servers-0 + local 'command=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/mysql-servers-0.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/mysql-servers-0-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 '' + local 'command=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local container_name= + set +o xtrace + '[' '!' 
-s /tmp/tmp.623IQJtkKw/mysql-servers-0.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/mysql-servers-0.sql /tmp/tmp.623IQJtkKw/mysql-servers-0.sql + log 'mysql_servers are configured in some-name-proxysql-0: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-11-28T12:44:05+0000]' mysql_servers are configured in some-name-proxysql-0: OK [2025-11-28T12:44:05+0000] mysql_servers are configured in some-name-proxysql-0: OK + compare_mysql_servers some-name-proxysql-1 mysql-servers-1 + local pod=some-name-proxysql-1 + local compare_file=mysql-servers-1 + compare_mysql_cmd_local mysql-servers-1 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + local command_id=mysql-servers-1 + local 'command=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/mysql-servers-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/mysql-servers-1-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 '' + local 'command=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local container_name= + set +o xtrace + '[' '!' 
-s /tmp/tmp.623IQJtkKw/mysql-servers-1.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/mysql-servers-1.sql /tmp/tmp.623IQJtkKw/mysql-servers-1.sql + log 'mysql_servers are configured in some-name-proxysql-1: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-11-28T12:44:07+0000]' mysql_servers are configured in some-name-proxysql-1: OK [2025-11-28T12:44:07+0000] mysql_servers are configured in some-name-proxysql-1: OK + desc 'check scaling PXC up' + set +o xtrace ----------------------------------------------------------------------------------- check scaling PXC up ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge -p '{"spec": {"pxc": {"size": 5}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.5n0iA8A6sU ++ mktemp + local LAST_ERR=/tmp/tmp.BNQOqpRaqG + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge -p '{"spec": {"pxc": {"size": 5}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.5n0iA8A6sU perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.BNQOqpRaqG + rm /tmp/tmp.5n0iA8A6sU /tmp/tmp.BNQOqpRaqG + return 0 + wait_pod some-name-pxc-3 + local pod=some-name-pxc-3 + local max_retry=480 + local ns= ++ echo some-name-pxc-3 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-3 condition met waiting for pod/some-name-pxc-3 to become Ready.Ok + wait_pod some-name-pxc-4 + local pod=some-name-pxc-4 + local max_retry=480 + local ns= ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo some-name-pxc-4 + local container=pxc + set +o xtrace pod/some-name-pxc-4 condition met waiting for pod/some-name-pxc-4 to become Ready.Ok + sleep 120 + compare_mysql_servers some-name-proxysql-0 mysql-servers-0-1 + local pod=some-name-proxysql-0 + local compare_file=mysql-servers-0-1 + compare_mysql_cmd_local mysql-servers-0-1 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 + local command_id=mysql-servers-0-1 + local 'command=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/mysql-servers-0-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/mysql-servers-0-1-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-0 '' + local 'command=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-0 + local container_name= + set +o xtrace + '[' '!' 
-s /tmp/tmp.623IQJtkKw/mysql-servers-0-1.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/mysql-servers-0-1.sql /tmp/tmp.623IQJtkKw/mysql-servers-0-1.sql + log 'new mysql_servers are configured in some-name-proxysql-0: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-11-28T12:48:42+0000]' new mysql_servers are configured in some-name-proxysql-0: OK [2025-11-28T12:48:42+0000] new mysql_servers are configured in some-name-proxysql-0: OK + compare_mysql_servers some-name-proxysql-1 mysql-servers-1-1 + local pod=some-name-proxysql-1 + local compare_file=mysql-servers-1-1 + compare_mysql_cmd_local mysql-servers-1-1 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 + local command_id=mysql-servers-1-1 + local 'command=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/mysql-servers-1-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/mysql-servers-1-1-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-1 '' + local 'command=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-1 + local container_name= + set +o xtrace + '[' '!' 
-s /tmp/tmp.623IQJtkKw/mysql-servers-1-1.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/mysql-servers-1-1.sql /tmp/tmp.623IQJtkKw/mysql-servers-1-1.sql + log 'new mysql_servers are configured in some-name-proxysql-1: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-11-28T12:48:43+0000]' new mysql_servers are configured in some-name-proxysql-1: OK [2025-11-28T12:48:43+0000] new mysql_servers are configured in some-name-proxysql-1: OK + desc 'check scaling ProxySQL up' + set +o xtrace ----------------------------------------------------------------------------------- check scaling ProxySQL up ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge -p '{"spec": {"proxysql": {"size": 3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.lIqNgZNLhN ++ mktemp + local LAST_ERR=/tmp/tmp.XJdvHfMizu + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge -p '{"spec": {"proxysql": {"size": 3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lIqNgZNLhN perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.XJdvHfMizu + rm /tmp/tmp.lIqNgZNLhN /tmp/tmp.XJdvHfMizu + return 0 + wait_pod some-name-proxysql-2 + local pod=some-name-proxysql-2 + local max_retry=480 + local ns= ++ echo some-name-proxysql-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-2 condition met waiting for pod/some-name-proxysql-2 to become Ready.Ok + sleep 120 + compare_scheduler some-name-proxysql-2 scheduler-2 + local pod=some-name-proxysql-2 + local compare_file=scheduler-2 + compare_mysql_cmd_local scheduler-2 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-2 + local command_id=scheduler-2 + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-2 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/scheduler-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/scheduler-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT * FROM runtime_scheduler;' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-2 '' + local 'command=SELECT * FROM runtime_scheduler;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-2 + local container_name= + set +o xtrace + '[' '!' 
-s /tmp/tmp.623IQJtkKw/scheduler-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/scheduler-2.sql /tmp/tmp.623IQJtkKw/scheduler-2.sql + log 'scheduler is enabled in some-name-proxysql-2: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-11-28T12:51:13+0000]' scheduler is enabled in some-name-proxysql-2: OK [2025-11-28T12:51:13+0000] scheduler is enabled in some-name-proxysql-2: OK + compare_mysql_servers some-name-proxysql-2 mysql-servers-2 + local pod=some-name-proxysql-2 + local compare_file=mysql-servers-2 + compare_mysql_cmd_local mysql-servers-2 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-2 + local command_id=mysql-servers-2 + local 'command=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-2 + local postfix= + local container_name= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/mysql-servers-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/mysql-servers-2-80.sql ']' + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + run_mysql_local 'SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' some-name-proxysql-2 '' + local 'command=SELECT hostgroup_id,SUBSTRING(hostname, 0, 16),weight,status FROM runtime_mysql_servers WHERE status='\''ONLINE'\''' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' + local pod=some-name-proxysql-2 + local container_name= + set +o xtrace + '[' '!' 
-s /tmp/tmp.623IQJtkKw/mysql-servers-2.sql ']' + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2270/e2e-tests/proxysql-scheduler/compare/mysql-servers-2.sql /tmp/tmp.623IQJtkKw/mysql-servers-2.sql + log 'mysql_servers are configured in some-name-proxysql-2: OK' ++ date +%Y-%m-%dT%H:%M:%S%z + echo '[2025-11-28T12:51:15+0000]' mysql_servers are configured in some-name-proxysql-2: OK [2025-11-28T12:51:15+0000] mysql_servers are configured in some-name-proxysql-2: OK + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + destroy proxysql-scheduler-31142 + local namespace=proxysql-scheduler-31142 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + tee /tmp/tmp.623IQJtkKw/operator.log + sort -u + grep -v 'get backup status: Job.batch' + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + grep -v level=info ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ + grep -v 'the object has been modified' +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.GfQbiUk8dN +++ mktemp ++ local LAST_ERR=/tmp/tmp.egylAP9Iix ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GfQbiUk8dN ++ cat /tmp/tmp.egylAP9Iix ++ rm /tmp/tmp.GfQbiUk8dN /tmp/tmp.egylAP9Iix ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-5df89bfbd-2mxk8 ++ mktemp + local LAST_OUT=/tmp/tmp.dZQ3ll1rJT ++ mktemp + local LAST_ERR=/tmp/tmp.kUhq7cVbOh + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-5df89bfbd-2mxk8 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dZQ3ll1rJT + cat /tmp/tmp.kUhq7cVbOh + rm /tmp/tmp.dZQ3ll1rJT /tmp/tmp.kUhq7cVbOh + return 0 2025-11-28T12:35:42.092Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.31.13-gke.1454000"} 2025-11-28T12:35:42.093Z INFO setup Manager starting up {"gitCommit": "21635d9bf2d5a9089046c1ee5f4037aef0276e07", "gitBranch": "PR-2270-21635d9b", "buildTime": "2025-11-28T11:18:12Z", "goVersion": "go1.25.4", "os": "linux", "arch": "amd64"} 2025-11-28T12:35:42.096Z INFO setup Registering Components. 
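
The entries below are what that collection pipeline emits: the destroy step tees the operator pod's log to a file while dropping known-noisy lines and normalizing volatile fields. Condensed into a standalone sketch (the output path here is illustrative and the pipeline order is approximate, since xtrace interleaves the stages; the label selector, grep filters, and sed expression are taken verbatim from the trace above):

    ns=pxc-operator
    # find the operator pod by its app label, as the harness does
    pod=$(kubectl get pods -n "$ns" \
        --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator \
        -o 'jsonpath={.items[].metadata.name}')
    # dump its log, drop info-level and known-noisy lines, blank out
    # volatile fields ("ts" values, limits-*/ path prefixes), then
    # de-duplicate and save a copy
    kubectl logs -n "$ns" "$pod" \
        | grep -v level=info \
        | grep -v 'get backup status: Job.batch' \
        | grep -v 'the object has been modified' \
        | /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' \
        | sort -u \
        | tee /tmp/operator.log
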
2025-11-28T12:35:42.947Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"} 2025-11-28T12:35:42.947Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2025-11-28T12:35:42.947Z INFO controller-runtime.metrics Starting metrics server 2025-11-28T12:35:42.947Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2025-11-28T12:35:42.947Z INFO controller-runtime.webhook Starting webhook server 2025-11-28T12:35:42.947Z INFO setup Starting the Cmd. 2025-11-28T12:35:42.947Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2025-11-28T12:35:42.948Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"} 2025-11-28T12:35:42.960Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2025-11-28T12:35:43.048Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 2025-11-28T12:35:43.095Z DEBUG events percona-xtradb-cluster-operator-5df89bfbd-2mxk8_779a4fa0-1f53-440a-b1a2-8c66c1a2f9cc became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"d5847d90-f458-4d40-a593-4f7066b1d77f","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1764333343083967009"}, "reason": "LeaderElection"} 2025-11-28T12:35:43.095Z INFO Starting EventSource {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "source": "kind source: *v1.Secret"} 2025-11-28T12:35:43.095Z INFO Starting EventSource {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2025-11-28T12:35:43.095Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2025-11-28T12:35:43.096Z INFO Starting EventSource {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2025-11-28T12:35:43.096Z INFO Starting EventSource {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "source": "kind source: *v1.PerconaXtraDBCluster"} 2025-11-28T12:35:43.196Z INFO Starting Controller {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup"} 2025-11-28T12:35:43.196Z INFO Starting Controller {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore"} 2025-11-28T12:35:43.196Z INFO Starting workers {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup", "worker count": 1} 2025-11-28T12:35:43.196Z INFO Starting workers {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore", "worker count": 1} 2025-11-28T12:35:43.197Z INFO Starting Controller {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster"} 2025-11-28T12:35:43.197Z INFO Starting workers {"controller": "pxc-controller", 
"controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "worker count": 1} 2025-11-28T12:36:29.876Z INFO Set CR version {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "4b2ad7ab-3bb9-4b69-9b0d-1d11af61ca11", "version": "1.19.0"} 2025-11-28T12:36:30.238Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "4b2ad7ab-3bb9-4b69-9b0d-1d11af61ca11", "object": "auto-some-name-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2025-11-28T12:36:30.365Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "4b2ad7ab-3bb9-4b69-9b0d-1d11af61ca11", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-11-28T12:36:30.456Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "4b2ad7ab-3bb9-4b69-9b0d-1d11af61ca11", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2025-11-28T12:36:30.560Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "4b2ad7ab-3bb9-4b69-9b0d-1d11af61ca11", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-28T12:36:30.629Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "4b2ad7ab-3bb9-4b69-9b0d-1d11af61ca11", "object": "some-name-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-28T12:36:30.677Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "4b2ad7ab-3bb9-4b69-9b0d-1d11af61ca11", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-28T12:36:30.813Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "4b2ad7ab-3bb9-4b69-9b0d-1d11af61ca11", "object": 
"some-name-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2025-11-28T12:36:31.637Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "eeb977c2-d7d7-497a-9e66-c481d9f8dedf", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-11-28T12:36:31.661Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "eeb977c2-d7d7-497a-9e66-c481d9f8dedf", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2025-11-28T12:37:54.264Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "6f71b029-f3f0-4a86-ba70-6854d5aed40c", "user": "operator"} 2025-11-28T12:37:54.316Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "6f71b029-f3f0-4a86-ba70-6854d5aed40c", "user": "monitor"} 2025-11-28T12:37:54.376Z INFO User monitor: granted privileges {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "6f71b029-f3f0-4a86-ba70-6854d5aed40c"} 2025-11-28T12:37:54.410Z INFO monitor user privileges granted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "6f71b029-f3f0-4a86-ba70-6854d5aed40c"} 2025-11-28T12:37:54.445Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "6f71b029-f3f0-4a86-ba70-6854d5aed40c", "user": "xtrabackup"} 2025-11-28T12:37:54.488Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "6f71b029-f3f0-4a86-ba70-6854d5aed40c"} 2025-11-28T12:37:54.527Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "6f71b029-f3f0-4a86-ba70-6854d5aed40c", "user": "replication"} 2025-11-28T12:37:54.536Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "6f71b029-f3f0-4a86-ba70-6854d5aed40c", "err": "get primary pxc pod: not found"} 2025-11-28T12:37:59.163Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "e6388208-1e4d-457d-9774-b6532b9b36fe", "err": "get primary pxc pod: not found"} 2025-11-28T12:40:16.051Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "8b2b5697-e1d0-4989-ae8d-a214e8456605", "user": "root"} 2025-11-28T12:40:16.181Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "8b2b5697-e1d0-4989-ae8d-a214e8456605", "new version": "8.0.43-34.1"} 2025-11-28T12:40:18.880Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "8b2b5697-e1d0-4989-ae8d-a214e8456605"} 2025-11-28T12:40:23.764Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "2d1422e9-c74d-43c5-b835-2b7fd0faa1a3"} 2025-11-28T12:40:29.186Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "b29fa3c0-5ad7-4dec-ad75-3321e5f829a5"} 2025-11-28T12:40:34.190Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "74784cb2-c2e4-4e6f-81cc-95daa175eecc"} 2025-11-28T12:40:40.071Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": 
{"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "e70f44a3-469a-4047-94c8-19d9c4948e76"} 2025-11-28T12:40:45.096Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "8eb7c6a7-223e-429d-8ede-eb785cd8b3dc"} 2025-11-28T12:40:50.456Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "7798b61d-bbc2-41aa-830f-06d2c5edda8c"} 2025-11-28T12:40:56.262Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "95be8f2f-506d-4578-9358-9f5249aa244d"} 2025-11-28T12:41:01.368Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "3d737b74-92a4-45f1-bc9e-ad9f8d1851cd"} 2025-11-28T12:41:06.892Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "7b2a84dd-8483-4932-ad71-b0ea7166b159"} 2025-11-28T12:41:12.141Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "d36fc069-a20e-44ed-a770-1f439628f7a9"} 2025-11-28T12:41:17.384Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "64e0ff9e-9e47-494e-9b13-6c1f56d8b9dc"} 2025-11-28T12:41:22.859Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "564d19a9-337b-44a4-928d-f9fa663ba59a"} 2025-11-28T12:41:28.162Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": 
"7c116bba-ec97-44f1-9797-c3ea2c9f673d"} 2025-11-28T12:41:33.469Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "4d034182-b8a2-4794-8988-4ac4a6b7c74b"} 2025-11-28T12:41:39.278Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "f4ed0140-3a30-4c83-acc2-2231b51a5e33"} 2025-11-28T12:41:44.144Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "fcf48586-9cc9-477f-b817-73dfc27b8968"} 2025-11-28T12:41:49.357Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "b102ff86-72d8-4398-8a43-3c8ee208e3d0"} 2025-11-28T12:41:55.449Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "cc9183d5-e039-4487-8112-0f15044c3755"} 2025-11-28T12:42:00.098Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "e30aad9b-9e94-4d6d-b154-606d27f4ccb0"} 2025-11-28T12:42:05.483Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "5dcef2b8-1265-4801-8243-621faf1d743d"} 2025-11-28T12:42:11.083Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "34b47431-56ae-4079-8ff2-0c72f70fd5f4"} 2025-11-28T12:42:16.469Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "4c5d7ad6-9ff9-4e51-98f2-97396e7bef2e"} 2025-11-28T12:42:21.749Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": 
"pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "101e5933-8dbf-485c-8fec-40766982f28b"} 2025-11-28T12:42:27.147Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "ea4aee17-506b-4f28-8981-3824f6d26f7f"} 2025-11-28T12:42:32.385Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "5ae0ba9a-0ce2-4ef6-a370-ff2d94664b20"} 2025-11-28T12:42:37.852Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "39b4bbc3-bf2b-4564-b700-ee711d52831d"} 2025-11-28T12:42:43.193Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "83cbbd50-78a4-4466-9bb2-e10ecf072cec"} 2025-11-28T12:42:48.664Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "11a1dd7d-5f9f-41b4-8b29-cc31a508332c"} 2025-11-28T12:42:54.281Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "7b5c7cd1-8e14-47df-b82d-6316c165ee0b"} 2025-11-28T12:42:59.260Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "a5a6d762-fb36-4775-9b70-8cdf7c2b2d14"} 2025-11-28T12:43:05.064Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "91cd8b22-f142-43f9-8d11-bc5ce5bf9dbd"} 2025-11-28T12:43:10.878Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": 
"proxysql-scheduler-31142", "name": "some-name", "reconcileID": "bf13d47b-eae9-4fe5-902d-b32ece7c7162"} 2025-11-28T12:43:16.347Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "f3246e73-0446-4a3f-8882-ae3a23714dfb"} 2025-11-28T12:43:21.568Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "799d3ca7-cd96-4704-9e94-608c4e58cbcd"} 2025-11-28T12:43:26.957Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "518286a5-b93d-4b70-85d0-08e7b7cbf357"} 2025-11-28T12:43:32.452Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "0e38ffd3-35dd-4214-9960-b895c09abe79"} 2025-11-28T12:43:37.599Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "621fb6b6-623f-4eeb-9eee-08d460ccf5f2"} 2025-11-28T12:43:42.889Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "76b0652f-010e-4c07-a514-c3afbd06a55b"} 2025-11-28T12:43:48.262Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "46873fbc-4564-4839-a076-2d5a133227b6"} 2025-11-28T12:43:54.159Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "35aa58e7-d073-47fb-89b5-8c1ec1c6ee79"} 2025-11-28T12:43:58.688Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "05468d43-de7e-49cf-bf01-a65d724a157c"} 2025-11-28T12:44:04.344Z DEBUG PXC users synced with 
ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "a30dcb2d-2659-4066-a98b-d7c923a49f9c"} 2025-11-28T12:44:08.834Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "6b138ced-8baa-466a-8d4a-236c8825e669", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-28T12:44:08.888Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "6b138ced-8baa-466a-8d4a-236c8825e669", "object": "some-name-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-28T12:44:09.586Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "8a4e9be8-96c1-4e85-8017-c4f89c240da2"} 2025-11-28T12:46:42.079Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "ed1841f3-0c6d-4f72-88ce-c742560bb19f"} 2025-11-28T12:46:47.390Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "5e1ccdfa-7eed-49b7-8b15-229e23cf9aa2"} 2025-11-28T12:46:52.973Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "d9062dbf-b8cd-4d46-ba4c-2e4e48fea78c"} 2025-11-28T12:46:58.644Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "5a93d827-6ca3-4da1-ae18-f40fec03130a"} 2025-11-28T12:47:03.583Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": 
"ddb52ce6-63c7-49e8-866e-e26b5b913f95"} 2025-11-28T12:47:09.494Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "7d0364be-c481-4e6c-93e1-e360b44f03b1"} 2025-11-28T12:47:14.572Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "7a459a1c-45b0-4aaa-9cf8-22eb09d8e894"} 2025-11-28T12:47:19.840Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "b53dbe66-5e5a-409f-aa3b-42c2a6fe512b"} 2025-11-28T12:47:25.485Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "373a7279-4bf9-4871-9776-0516819be787"} 2025-11-28T12:47:31.080Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "592a9f1c-0802-427a-a3a4-6524d475a4a4"} 2025-11-28T12:47:36.757Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "7db8e85f-1944-4274-8be8-187030395572"} 2025-11-28T12:47:42.041Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "6ef032b1-c23f-46a1-88b6-3960ecefec73"} 2025-11-28T12:47:47.263Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "dfdec22f-32db-4b07-8c05-2f849da281aa"} 2025-11-28T12:47:53.110Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "067ae902-dba0-4e2c-85eb-ea96a759a1f3"} 2025-11-28T12:47:58.382Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": 
"pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "f6eefdf1-7e48-4334-88e6-b6927aa1fb95"} 2025-11-28T12:48:03.854Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "cbfec81d-54aa-4890-abd4-02f4c575490d"} 2025-11-28T12:48:09.741Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "905fd83b-1adc-4538-928c-272fa09f6c9c"} 2025-11-28T12:48:14.893Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "1cf5ed6e-990a-4325-958d-76d09a4262c1"} 2025-11-28T12:48:20.187Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "bc2dce65-2255-4496-ab30-fab34a6f8fe2"} 2025-11-28T12:48:25.751Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "2fb16b67-bf32-49e2-a9e0-5f67c2be2a8b"} 2025-11-28T12:48:31.144Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "aada515c-449e-48cc-b312-72df43372c0b"} 2025-11-28T12:48:36.648Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "07a9c888-194f-4169-9b2c-05ab8834aca1"} 2025-11-28T12:48:42.082Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "23d684fa-72cb-438a-91d7-ac50f7e42673"} 2025-11-28T12:48:46.154Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": 
"proxysql-scheduler-31142", "name": "some-name", "reconcileID": "ae6249a9-2b3f-4905-bbf7-0545ae08f2ea", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-28T12:48:46.208Z DEBUG Updating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "ae6249a9-2b3f-4905-bbf7-0545ae08f2ea", "object": "some-name-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}", "hashChanged": true, "metaChanged": true} 2025-11-28T12:48:47.842Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "b75aee75-c678-4ac6-b95d-d1f5c314ca58"} 2025-11-28T12:49:16.798Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "f33fad82-b82d-44bf-9d61-d5e4cc722e01", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:975\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:854\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1693"} 2025-11-28T12:49:26.979Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "1725806b-5a93-4baa-bb30-464e0dc95734"} 2025-11-28T12:49:34.911Z 
DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "e627a845-363a-4c50-9434-229100359c90"} 2025-11-28T12:49:40.865Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "fae54dce-c085-45b9-9d37-de89646434b7"} 2025-11-28T12:49:45.708Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "9af66c83-70b1-4859-bb7d-bd1cc9371c14"} 2025-11-28T12:49:51.174Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "72640715-a8db-41be-953f-6253774154c5"} 2025-11-28T12:49:57.569Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "82f59d4e-9884-4551-b050-29a8f268b566"} 2025-11-28T12:50:02.871Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "5235d945-dea3-404a-a78d-a745fa518fe1"} 2025-11-28T12:50:08.405Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "1eaa5ecc-92b9-4c7b-8c98-f7fbb42034eb"} 2025-11-28T12:50:13.806Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "9f6abd38-3041-40d7-82ed-060ba3e2d69d"} 2025-11-28T12:50:19.195Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "ab01d2f9-6b77-4ba8-9dc9-f1551501cb9d"} 2025-11-28T12:50:24.488Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", 
"PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "0f6068dc-b615-488a-8a94-51fcc27f572d"} 2025-11-28T12:50:29.988Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "4b080781-07e6-4f14-9e67-4cd96f5d1f7f"} 2025-11-28T12:50:35.290Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "ab52d34f-3243-46e9-9424-6fbd104a1051"} 2025-11-28T12:50:40.765Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "17362dcd-db12-4abe-b2f9-0648e10b2959"} 2025-11-28T12:50:45.998Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "152fd901-2c6f-44c0-a155-f7841ab68de4"} 2025-11-28T12:50:51.454Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "e4cd9854-8759-412f-8607-65bd2893f70d"} 2025-11-28T12:50:57.581Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "d93acb7f-43d7-4079-8134-2c82d89c5a4e"} 2025-11-28T12:51:02.779Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "8af676cd-44c2-48ed-9e69-9f4edfe413ff"} 2025-11-28T12:51:08.019Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "f5bdaf1d-c023-4a77-ae56-190c23647c50"} 2025-11-28T12:51:13.492Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": 
"afd76f85-f59b-40a3-82f2-7b782e5ec293"} 2025-11-28T12:51:18.891Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"some-name","namespace":"proxysql-scheduler-31142"}, "namespace": "proxysql-scheduler-31142", "name": "some-name", "reconcileID": "967fa718-6e37-49a8-a0f3-04e27717b2ec"} github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:856 -  }, -  { -  }, +  }, -  Annotations: map[string]string{ +  Annotations: map[string]string{ -  APIVersion: "apps/v1", -  APIVersion: "apps/v1", +  AvailableReplicas: 0, -  AvailableReplicas: 2, -  AvailableReplicas: 3, -  CollisionCount: &0, +  CollisionCount: nil, +  CreationTimestamp: v1.Time{}, -  CreationTimestamp: v1.Time{Time: s"2025-11-28 12:36:30 +0000 UTC"}, +  CurrentReplicas: 0, -  CurrentReplicas: 2, -  CurrentReplicas: 3, +  CurrentRevision: "", -  CurrentRevision: "some-name-proxysql-85dc998f97", -  CurrentRevision: "some-name-pxc-6d4b9965f6", -  DefaultMode: &420, -  DefaultMode: &420, +  DefaultMode: nil, +  DefaultMode: nil, +  DeprecatedServiceAccount: "", -  DeprecatedServiceAccount: "default", +  DNSPolicy: "", -  DNSPolicy: "ClusterFirst", -  FieldsType: "FieldsV1", -  FieldsType: "FieldsV1", -  FieldsV1: s`{"f:metadata":{"f:annotations":{".":{},"f:percona.com/last-confi`..., -  FieldsV1: s`{"f:status":{"f:availableReplicas":{},"f:collisionCount":{},"f:c`..., +  Generation: 0, -  Generation: 1, +  ManagedFields: nil, -  ManagedFields: []v1.ManagedFieldsEntry{ -  Manager: "kube-controller-manager", -  Manager: "percona-xtradb-cluster-operator", +  ObservedGeneration: 0, -  ObservedGeneration: 1, -  Operation: "Update", -  Operation: "Update", -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Miwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., -  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJl"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6Mywic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHJveHlzcWwiLCJhcHAua3ViZXJuZXRlcy5pby9pbnN0YW5jZSI6InNvbWUtbmFtZSIsImFwcC5rdWJlcm5ldGVzLmlvL21hbmFnZWQtYnkiOiJwZXJjb25hLXh0cmFkYi1jbHVzdGVyLW9wZXJhdG9yIiwiYXBw"..., +  "percona.com/last-config-hash": "eyJyZXBsaWNhcyI6NSwic2VsZWN0b3IiOnsibWF0Y2hMYWJlbHMiOnsiYXBwLmt1YmVybmV0ZXMuaW8vY29tcG9uZW50IjoicHhjIiwiYXBwLmt1YmVybmV0ZXMuaW8vaW5zdGFuY2UiOiJzb21lLW5hbWUiLCJhcHAua3ViZXJuZXRlcy5pby9tYW5hZ2VkLWJ5IjoicGVyY29uYS14dHJhZGItY2x1c3Rlci1vcGVyYXRvciIsImFwcC5rdWJl"..., +  PeriodSeconds: 0, -  PeriodSeconds: 10, +  PersistentVolumeClaimRetentionPolicy: nil, -  PersistentVolumeClaimRetentionPolicy: s"&StatefulSetPersistentVolumeClaimRetentionPolicy{WhenDeleted:Retain,WhenScaled:Retain,}", +  Phase: "", -  Phase: "Pending", +  PodManagementPolicy: "", -  PodManagementPolicy: "OrderedReady", +  Protocol: "", -  Protocol: "TCP", +  ReadyReplicas: 0, -  ReadyReplicas: 2, -  ReadyReplicas: 3, +  
Replicas: 0, -  Replicas: 2, -  Replicas: &2, -  Replicas: 3, -  Replicas: &3, +  Replicas: &3, +  Replicas: &5, +  ResourceVersion: "", -  ResourceVersion: "1764333428377119020", -  ResourceVersion: "1764333615145007020", +  RestartPolicy: "", -  RestartPolicy: "Always", -  RevisionHistoryLimit: &10, +  RevisionHistoryLimit: nil, +  SchedulerName: "", -  SchedulerName: "default-scheduler", -  Subresource: "status", +  TerminationMessagePath: "", -  TerminationMessagePath: "/dev/termination-log", +  TerminationMessagePolicy: "", -  TerminationMessagePolicy: "File", -  Time: s"2025-11-28 12:36:30 +0000 UTC", -  Time: s"2025-11-28 12:37:08 +0000 UTC", -  Time: s"2025-11-28 12:40:15 +0000 UTC", -  TopologySpreadConstraints: nil, +  TopologySpreadConstraints: []v1.TopologySpreadConstraint{}, +  UID: "", -  UID: "16400c7d-3d0d-4ef3-b169-c089ec525198", -  UID: "767e0cae-1f19-4d7c-a46b-154913040b88", +  UpdatedReplicas: 0, -  UpdatedReplicas: 2, -  UpdatedReplicas: 3, +  UpdateRevision: "", -  UpdateRevision: "some-name-proxysql-85dc998f97", -  UpdateRevision: "some-name-pxc-6d4b9965f6", -  VolumeMode: &"Filesystem", +  VolumeMode: nil,   }    },    },    {    },    },    {    },    },    {    },    ... // 16 identical fields    ... // 16 identical fields    ... // 22 identical fields    ... // 2 identical fields    ... // 3 identical fields    ... // 3 identical fields    ... // 3 identical fields    ... // 4 identical fields    ... // 5 identical fields    ... // 5 identical fields    ... // 6 identical fields    ... // 7 identical fields    ... // 8 identical fields    ... // 9 identical fields    AccessModes: nil,    ActiveDeadlineSeconds: nil,    Affinity: nil,    Args: {"mysqld"},    Args: {"proxysql", "-f", "-c", "/etc/proxysql/proxysql.cnf", ...},    AutomountServiceAccountToken: nil,    AWSElasticBlockStore: nil,    AzureFile: nil,    Capacity: nil,    Conditions: nil,    ConfigMap: &v1.ConfigMapVolumeSource{    ContainerPort: 3306,    ContainerPort: 33060,    ContainerPort: 33062,    ContainerPort: 4444,    ContainerPort: 4567,    ContainerPort: 4568,    ContainerPort: 6032,    ContainerPort: 6070,    Containers: []v1.Container{    DataSource: nil,    DataSourceRef: nil,    DeletionGracePeriodSeconds: nil,    DeletionTimestamp: nil,    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-proxysql"}, Optional: &true}}},    EnvFrom: {{SecretRef: &{LocalObjectReference: {Name: "some-name-env-vars-pxc"}, Optional: &true}}},    Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc"}, {Name: "OPERATOR_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "operator"}}}, {Name: "PROXY_ADMIN_USER", Value: "proxyadmin"}, {Name: "PROXY_ADMIN_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "proxyadmin"}}}, ...},    Env: {{Name: "PXC_SERVICE", Value: "some-name-pxc-unready"}, {Name: "MONITOR_HOST", Value: "%"}, {Name: "MYSQL_ROOT_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "root"}}}, {Name: "XTRABACKUP_PASSWORD", ValueFrom: &{SecretKeyRef: &{LocalObjectReference: {Name: "internal-some-name"}, Key: "xtrabackup"}}}, ...},    EphemeralContainers: nil,    FailureThreshold: 3,    FC: nil,    Finalizers: nil,    GitRepo: nil,    HostAliases: nil,    HostIP: "",    HostPort: 0,    ImagePullPolicy: "Always",    InitContainers: []v1.Container{    InitialDelaySeconds: 300,    ISCSI: nil,    Items: nil,    Items: nil,    Labels: nil,    Lifecycle: nil, 
   LivenessProbe: &v1.Probe{    LocalObjectReference: {Name: "auto-some-name-pxc"},    LocalObjectReference: {Name: "some-name-pxc"},    MinReadySeconds: 0,    Name: "auto-config",    {Name: "bin", VolumeSource: {EmptyDir: &{}}},    Name: "config",    Name: "ist",    Name: "mysql",    Name: "mysql-admin",    Name: "mysql-init-file",    Name: "mysql-users-secret-file",    Name: "mysqlx",    Name: "proxyadm",    Namespace: "proxysql-scheduler-31142",    Name: "ssl",    Name: "ssl-internal",    Name: "sst",    Name: "stats",    {Name: "tmp", VolumeSource: {EmptyDir: &{}}},    Name: "vault-keyring-secret",    Name: "write-set",    NFS: nil,    NodeName: "",    NodeSelector: nil,    ObjectMeta: {Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "proxysql", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc"}},    ObjectMeta: {Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}, Annotations: {"kubectl.kubernetes.io/default-container": "pxc", "percona.com/configuration-hash": "d41d8cd98f00b204e9800998ecf8427e", "percona.com/ssl-hash": "cfd6a52398268173b51d7cdb1331c09a", "percona.com/ssl-internal-hash": "751e59a1c1d94ac67f9d6bb6e756acdc"}},    ObjectMeta: {Name: "datadir", Labels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    ObjectMeta: {Name: "proxydata", Labels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    ObjectMeta: v1.ObjectMeta{    Optional: &false,    Optional: &true,    Optional: &true,    Ordinals: nil,    OS: nil,    Overhead: nil,    OwnerReferences: {{APIVersion: "pxc.percona.com/v1", Kind: "PerconaXtraDBCluster", Name: "some-name", UID: "457a008e-dfac-4e2f-b8e3-0e0a9694b33a", ...}},    Ports: []v1.ContainerPort{    PreemptionPolicy: nil,    ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/liveness-check.sh"}}},    Quobyte: nil,    ReadinessProbe: &{ProbeHandler: {Exec: &{Command: {"/var/lib/mysql/readiness-check.sh"}}}, InitialDelaySeconds: 15, TimeoutSeconds: 15, PeriodSeconds: 30, ...},    SecretName: "internal-some-name",    SecretName: "some-name-mysql-init",    SecretName: "some-name-ssl",    SecretName: "some-name-ssl-internal",    SecretName: "some-name-vault",    Secret: &v1.SecretVolumeSource{    SecurityContext: nil,    Selector: &{MatchLabels: {"app.kubernetes.io/component": "proxysql", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    Selector: &{MatchLabels: {"app.kubernetes.io/component": "pxc", "app.kubernetes.io/instance": "some-name", "app.kubernetes.io/managed-by": "percona-xtradb-cluster-operator", "app.kubernetes.io/name": "percona-xtradb-cluster", ...}},    SelfLink: "",    
ServiceAccountName: "default",    ServiceName: "some-name-proxysql-unready",    ServiceName: "some-name-pxc",    SetHostnameAsFQDN: nil,    Spec: v1.PersistentVolumeClaimSpec{    Spec: v1.PodSpec{    Spec: v1.StatefulSetSpec{    StartupProbe: nil,    Status: v1.PersistentVolumeClaimStatus{    Status: v1.StatefulSetStatus{    StorageClassName: nil,    Subdomain: "",    SuccessThreshold: 1,    Template: v1.PodTemplateSpec{    TerminationGracePeriodSeconds: &30,    TerminationGracePeriodSeconds: &600,    TerminationGracePeriodSeconds: nil,    TimeoutSeconds: 5,    Tolerations: nil,    TypeMeta: {},    TypeMeta: {Kind: "StatefulSet", APIVersion: "apps/v1"},    UpdateStrategy: {Type: "RollingUpdate", RollingUpdate: &{Partition: &0}},   &v1.StatefulSet{    VolumeAttributesClassName: nil,    VolumeClaimTemplates: []v1.PersistentVolumeClaim{    VolumeDevices: nil,    VolumeMounts: {{Name: "datadir", MountPath: "/var/lib/mysql"}, {Name: "config", MountPath: "/etc/percona-xtradb-cluster.conf.d"}, {Name: "tmp", MountPath: "/tmp"}, {Name: "ssl", MountPath: "/etc/mysql/ssl"}, ...},    VolumeName: "",    VolumeSource: v1.VolumeSource{    Volumes: []v1.Volume{    VsphereVolume: nil,    WorkingDir: "", + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n proxysql-scheduler-31142 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.oIKURwCh46 ++ mktemp + local LAST_ERR=/tmp/tmp.xSW2Igx4um + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.oIKURwCh46 perconaxtradbcluster.pxc.percona.com "some-name" deleted from proxysql-scheduler-31142 namespace + cat /tmp/tmp.xSW2Igx4um + rm /tmp/tmp.oIKURwCh46 /tmp/tmp.xSW2Igx4um + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.qkBmgwfUWl ++ mktemp + local LAST_ERR=/tmp/tmp.ShrcKirHDu + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.qkBmgwfUWl No resources found + cat /tmp/tmp.ShrcKirHDu + rm /tmp/tmp.qkBmgwfUWl /tmp/tmp.ShrcKirHDu + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.WIdbgR2nDm ++ mktemp + local LAST_ERR=/tmp/tmp.5RdhFhVIdH + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.WIdbgR2nDm No resources found + cat /tmp/tmp.5RdhFhVIdH + rm /tmp/tmp.WIdbgR2nDm /tmp/tmp.5RdhFhVIdH + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.BTyxOntpRw ++ mktemp + local LAST_ERR=/tmp/tmp.3kd6qr4SXC + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BTyxOntpRw validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.3kd6qr4SXC + rm /tmp/tmp.BTyxOntpRw /tmp/tmp.3kd6qr4SXC + return 0 + kubectl_bin delete -f 
https://github.com/jetstack/cert-manager/releases/download/v1.18.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace proxysql-scheduler-31142 + rm -rf /tmp/tmp.623IQJtkKw ++ mktemp + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.fPj2p4kKOY + local LAST_OUT=/tmp/tmp.KmiNYiPeiQ ++ mktemp + local LAST_ERR=/tmp/tmp.iasyumHrbO + local exit_status=0 ++ mktemp + local LAST_ERR=/tmp/tmp.AjyPPrCqkn + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace proxysql-scheduler-31142 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator