Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/logs/proxy-switch-8-0.log Warning: version difference between client (1.36) and server (1.33) exceeds the supported minor version skew of +/-1 Warning: version difference between client (1.36) and server (1.33) exceeds the supported minor version skew of +/-1 + main + create_infra proxy-switch-28565 + local ns=proxy-switch-28565 + '[' -n pxc-operator ']' + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + kubectl patch pxc -n proxy-switch-31565 proxy-switch --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/proxy-switch patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.IEAp0DSKQR ++ mktemp + local LAST_ERR=/tmp/tmp.Y4fBXBWLNN + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.IEAp0DSKQR perconaxtradbcluster.pxc.percona.com "proxy-switch" deleted from proxy-switch-31565 namespace + cat /tmp/tmp.Y4fBXBWLNN + rm /tmp/tmp.IEAp0DSKQR /tmp/tmp.Y4fBXBWLNN + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.Ve7enfA014 ++ mktemp + local LAST_ERR=/tmp/tmp.hcG44G4NEj + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Ve7enfA014 No resources found + cat /tmp/tmp.hcG44G4NEj + rm /tmp/tmp.Ve7enfA014 /tmp/tmp.hcG44G4NEj + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.3yQJeiorks ++ mktemp + local LAST_ERR=/tmp/tmp.i0BkttN7Bx + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.3yQJeiorks No resources found + cat /tmp/tmp.i0BkttN7Bx + rm /tmp/tmp.3yQJeiorks /tmp/tmp.i0BkttN7Bx + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ sed s/NAMESPACE// ++ awk '-F ' '{print $2}' ++ tail -n1 + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get ValidatingWebhookConfiguration + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep validate-auth ++ awk '{print $1}' ++ kubectl get ValidatingWebhookConfiguration + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep chaos-mesh ++ kubectl api-resources ++ awk '{print $1}' ++ grep chaos-mesh.org ++ awk '{print $1}' ++ kubectl get crd + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ grep chaos-mesh ++ kubectl get clusterrole ++ awk '{print $1}' + timeout 30 kubectl 
delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator + grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + awk '{print$1}' + kubectl_bin get ns + xargs kubectl delete ns ++ mktemp + local LAST_OUT=/tmp/tmp.AW74fSqG4z ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.aIa8CKuFwZ + local exit_status=0 + local LAST_OUT=/tmp/tmp.lP27OGsdKz ++ mktemp ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + local LAST_ERR=/tmp/tmp.1agyVuu5Ap + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lP27OGsdKz + cat /tmp/tmp.1agyVuu5Ap + rm /tmp/tmp.lP27OGsdKz /tmp/tmp.1agyVuu5Ap + return 0 namespace "proxy-switch-31565" deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.AW74fSqG4z namespace "pxc-operator" deleted + cat /tmp/tmp.aIa8CKuFwZ + rm /tmp/tmp.AW74fSqG4z /tmp/tmp.aIa8CKuFwZ + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'waiting for namespace/pxc-operator to be deleted' waiting for namespace/pxc-operator to be deleted+ set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.o5yULJHqEe ++ mktemp + local LAST_ERR=/tmp/tmp.hO9qSFnMAp + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.o5yULJHqEe namespace/pxc-operator created + cat /tmp/tmp.hO9qSFnMAp + rm /tmp/tmp.o5yULJHqEe /tmp/tmp.hO9qSFnMAp + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.6PO7ZpcH75 +++ mktemp ++ local LAST_ERR=/tmp/tmp.psXYJr5ziP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6PO7ZpcH75 ++ cat /tmp/tmp.psXYJr5ziP ++ rm /tmp/tmp.6PO7ZpcH75 /tmp/tmp.psXYJr5ziP ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2476-a8b01a39-5-cluster5 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.ngJkOynFZN ++ mktemp + local LAST_ERR=/tmp/tmp.iJ7kj27Vy6 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2476-a8b01a39-5-cluster5 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ngJkOynFZN Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2476-a8b01a39-5-cluster5" modified. 
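A note on the pattern that recurs throughout this trace: every kubectl invocation goes through a retry wrapper that captures stdout and stderr to mktemp files, retries up to three times, replays both streams, and propagates the exit status. A minimal sketch reconstructing it from the trace (the name kubectl_bin and the LAST_OUT/LAST_ERR variables appear above; the exact function body is an assumption, not the verbatim e2e-tests helper):

    # Reconstructed retry wrapper (assumed body). stdout/stderr go to temp
    # files so they can be replayed in order after the retries finish.
    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" 1>"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ "$exit_status" != 0 ]; then
                sleep 0    # the trace shows "sleep 0" between failed attempts
            else
                break
            fi
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR"
        rm "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }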
+ cat /tmp/tmp.iJ7kj27Vy6 + rm /tmp/tmp.ngJkOynFZN /tmp/tmp.iJ7kj27Vy6 + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.DaUrquYfQS ++ mktemp + local LAST_ERR=/tmp/tmp.7DWtzzkkW1 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.DaUrquYfQS customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.7DWtzzkkW1 + rm /tmp/tmp.DaUrquYfQS /tmp/tmp.7DWtzzkkW1 + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.qEZdpDRbqn ++ mktemp + local LAST_ERR=/tmp/tmp.cZC8Wc5qLb + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.qEZdpDRbqn clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.cZC8Wc5qLb + rm /tmp/tmp.qEZdpDRbqn /tmp/tmp.cZC8Wc5qLb + return 0 + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2476-a8b01a39^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "PXCO_FEATURE_GATES").value) = ""' - + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/deploy/cw-operator.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.zBw6o5rj5h ++ mktemp + local LAST_ERR=/tmp/tmp.zrNagPRurR + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.zBw6o5rj5h deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.zrNagPRurR + rm /tmp/tmp.zBw6o5rj5h /tmp/tmp.zrNagPRurR + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + 
local LAST_OUT=/tmp/tmp.CVa3iZjv1K ++ mktemp + local LAST_ERR=/tmp/tmp.blCggiRuib + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.CVa3iZjv1K pod/percona-xtradb-cluster-operator-8548fd5788-nkq4t condition met + cat /tmp/tmp.blCggiRuib E0516 19:54:54.059315 10286 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/pxc-operator/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dpercona-xtradb-cluster-operator-8548fd5788-nkq4t&resourceVersion=1778961293700258000&timeoutSeconds=408&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" + rm /tmp/tmp.CVa3iZjv1K /tmp/tmp.blCggiRuib + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator +++ mktemp ++ head -1 ++ jq -r '.items[] | select(.metadata.deletionTimestamp == null) | .metadata.name' ++ local LAST_OUT=/tmp/tmp.JIDAdF0FIz +++ mktemp ++ local LAST_ERR=/tmp/tmp.EaBcYPdd6p ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JIDAdF0FIz ++ cat /tmp/tmp.EaBcYPdd6p ++ rm /tmp/tmp.JIDAdF0FIz /tmp/tmp.EaBcYPdd6p ++ return 0 + wait_pod percona-xtradb-cluster-operator-8548fd5788-nkq4t 480 pxc-operator + local pod=percona-xtradb-cluster-operator-8548fd5788-nkq4t + local max_retry=480 + local ns=pxc-operator ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo percona-xtradb-cluster-operator-8548fd5788-nkq4t ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-8548fd5788-nkq4t condition met E0516 19:54:58.206476 10966 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/pxc-operator/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dpercona-xtradb-cluster-operator-8548fd5788-nkq4t&resourceVersion=1778961296384223000&timeoutSeconds=453&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/percona-xtradb-cluster-operator-8548fd5788-nkq4t to become Ready.Ok + sleep 3 + create_namespace proxy-switch-28565 + local namespace=proxy-switch-28565 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ sed s/NAMESPACE// ++ awk '-F ' '{print $2}' ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 + local chaos_mesh_ns= + '[' -n '' ']' ++ awk '{print $1}' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ grep chaos-mesh ++ kubectl get ValidatingWebhookConfiguration + timeout 30 kubectl delete 
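The operator deploy step above rewrites deploy/cw-operator.yaml in-stream before applying it. Condensed from the trace (the yq and sed expressions are copied from the log; the linear pipeline ordering here is a simplification of the interleaved trace):

    # Condensed deploy pipeline: patch env vars with yq, pin the image and
    # failureThreshold with sed, then apply the result.
    cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/deploy/cw-operator.yaml \
        | yq eval '(select(.kind == "Deployment").spec.template.spec.containers[]
              | select(.name == "percona-xtradb-cluster-operator").env[]
              | select(.name == "DISABLE_TELEMETRY").value) = "true"' - \
        | yq eval '(select(.kind == "Deployment").spec.template.spec.containers[]
              | select(.name == "percona-xtradb-cluster-operator").env[]
              | select(.name == "LOG_LEVEL").value) = "VERBOSE"' - \
        | sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-2476-a8b01a39^' \
        | sed -e 's^failureThreshold: .*^failureThreshold: 10^' \
        | kubectl apply -f -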
ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ grep validate-auth ++ awk '{print $1}' ++ kubectl get ValidatingWebhookConfiguration + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ kubectl api-resources ++ grep chaos-mesh ++ kubectl get crd ++ awk '{print $1}' ++ grep chaos-mesh.org + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ awk '{print $1}' ++ kubectl get clusterrolebinding ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + '[' -n '' ']' + desc 'cleaned up old namespaces proxy-switch-28565' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces proxy-switch-28565 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace proxy-switch-28565 + awk '{print$1}' + grep -E -v '^kube-|^default|Terminating|pxc-operator|openshift|^gke-|^gmp-|^NAME' + xargs kubectl delete ns ++ mktemp + local LAST_OUT=/tmp/tmp.24a2NHXf8d + kubectl_bin get ns ++ mktemp + local LAST_OUT=/tmp/tmp.HMXD4DFVvU ++ mktemp + local LAST_ERR=/tmp/tmp.Q88rsEWPar + local exit_status=0 ++ mktemp + local LAST_ERR=/tmp/tmp.GjfFEDrPZM + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace proxy-switch-28565 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace proxy-switch-28565 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.HMXD4DFVvU + cat /tmp/tmp.GjfFEDrPZM + rm /tmp/tmp.HMXD4DFVvU /tmp/tmp.GjfFEDrPZM + return 0 error: resource(s) were provided, but no name was specified + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace proxy-switch-28565 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.24a2NHXf8d + cat /tmp/tmp.Q88rsEWPar Error from server (NotFound): namespaces "proxy-switch-28565" not found + rm /tmp/tmp.24a2NHXf8d /tmp/tmp.Q88rsEWPar + return 1 + : + wait_for_delete namespace/proxy-switch-28565 + local res=namespace/proxy-switch-28565 + echo -n 'waiting for namespace/proxy-switch-28565 to be deleted' waiting for namespace/proxy-switch-28565 to be deleted+ set +o xtrace Error from server (NotFound): namespaces "proxy-switch-28565" not found + desc 'create namespace proxy-switch-28565' + set +o xtrace ----------------------------------------------------------------------------------- create namespace proxy-switch-28565 ----------------------------------------------------------------------------------- + kubectl_bin create namespace proxy-switch-28565 ++ mktemp + local LAST_OUT=/tmp/tmp.oeiCfYJBMj ++ mktemp + local LAST_ERR=/tmp/tmp.5bV6o0DRkp + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' 
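The repeated "error: resource(s) were provided, but no name was specified" messages in the destroy_chaos_mesh blocks above are expected on a cluster with no chaos-mesh installed: each cleanup step greps for chaos-mesh resources, finds nothing, and hands an empty name list to kubectl delete, then swallows the failure with ":". The shape of each step, reconstructed from the trace:

    # One cleanup step from destroy_chaos_mesh (reconstruction): with no
    # chaos-mesh present the substitution is empty, delete fails fast, and
    # the no-op ":" keeps the script moving.
    timeout 30 kubectl delete crd $(kubectl get crd | grep chaos-mesh.org | awk '{print $1}') || :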
+ set +e + kubectl create namespace proxy-switch-28565 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.oeiCfYJBMj namespace/proxy-switch-28565 created + cat /tmp/tmp.5bV6o0DRkp + rm /tmp/tmp.oeiCfYJBMj /tmp/tmp.5bV6o0DRkp + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.D5x58GfvQ7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ynwnnKr83s ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.D5x58GfvQ7 ++ cat /tmp/tmp.ynwnnKr83s ++ rm /tmp/tmp.D5x58GfvQ7 /tmp/tmp.ynwnnKr83s ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2476-a8b01a39-5-cluster5 --namespace=proxy-switch-28565 ++ mktemp + local LAST_OUT=/tmp/tmp.rsoBaj7zxn ++ mktemp + local LAST_ERR=/tmp/tmp.qvtLuN8rs3 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-2476-a8b01a39-5-cluster5 --namespace=proxy-switch-28565 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.rsoBaj7zxn Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-2476-a8b01a39-5-cluster5" modified. + cat /tmp/tmp.qvtLuN8rs3 + rm /tmp/tmp.rsoBaj7zxn /tmp/tmp.qvtLuN8rs3 + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.Wkz1Bj0r8z ++ mktemp + local LAST_ERR=/tmp/tmp.Y5dbT369l7 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Wkz1Bj0r8z secret/minio-secret created secret/aws-s3-secret created secret/do-spaces-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.Y5dbT369l7 + rm /tmp/tmp.Wkz1Bj0r8z /tmp/tmp.Y5dbT369l7 + return 0 + desc 'create PXC cluster with HAProxy' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster with HAProxy ----------------------------------------------------------------------------------- + cluster=proxy-switch + spinup_pxc proxy-switch /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/conf/proxy-switch.yml 3 10 + local cluster=proxy-switch + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/conf/proxy-switch.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f 
/mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.tW6vkg6qiE ++ mktemp + local LAST_ERR=/tmp/tmp.EnGzlhgB2x + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.tW6vkg6qiE secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.EnGzlhgB2x + rm /tmp/tmp.tW6vkg6qiE /tmp/tmp.EnGzlhgB2x + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/conf/client.yml + local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/conf/client.yml + local pvc_name= + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/conf/client.yml '' + local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/conf/client.yml + local pvc_name= + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2476-a8b01a39#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' ++ mktemp + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.proxy-switch-28565~ + local LAST_OUT=/tmp/tmp.QYB05vzynT + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/conf/client.yml ++ mktemp + local LAST_ERR=/tmp/tmp.5sinzN5EBB + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.QYB05vzynT deployment.apps/pxc-client created + cat /tmp/tmp.5sinzN5EBB + rm /tmp/tmp.QYB05vzynT /tmp/tmp.5sinzN5EBB + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/conf/proxy-switch.yml + local config_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/conf/proxy-switch.yml + local pvc_name= + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/conf/proxy-switch.yml '' + local input_file=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/conf/proxy-switch.yml + local pvc_name= + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: 
perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/fluentbit:main-logcollector#' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/conf/proxy-switch.yml ++ mktemp + /usr/bin/sed -e s~minio-service.#namespace~minio-service.proxy-switch-28565~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#claimName:..*-backup-pvc$#claimName: #' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + local LAST_OUT=/tmp/tmp.bLlJKrIXOQ + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2476-a8b01a39#' ++ mktemp + local LAST_ERR=/tmp/tmp.7BVyV9kWvV + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.bLlJKrIXOQ perconaxtradbcluster.pxc.percona.com/proxy-switch created + cat /tmp/tmp.7BVyV9kWvV + rm /tmp/tmp.bLlJKrIXOQ /tmp/tmp.7BVyV9kWvV + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy proxy-switch ++ local target_cluster=proxy-switch +++ kubectl_bin get pxc proxy-switch -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.ydBZhsN5d4 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.uuJO33QpZY +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc proxy-switch -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.ydBZhsN5d4 +++ cat /tmp/tmp.uuJO33QpZY +++ rm /tmp/tmp.ydBZhsN5d4 /tmp/tmp.uuJO33QpZY +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo proxy-switch-haproxy ++ return + local proxy=proxy-switch-haproxy + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n proxy-switch-28565 ++ mktemp + local LAST_OUT=/tmp/tmp.zLw8kauCqj ++ mktemp + local LAST_ERR=/tmp/tmp.4Ge2wFelrD + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n proxy-switch-28565 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n proxy-switch-28565 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n proxy-switch-28565 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.zLw8kauCqj + cat /tmp/tmp.4Ge2wFelrD error: no matching resources found + rm /tmp/tmp.zLw8kauCqj /tmp/tmp.4Ge2wFelrD + return 1 + true + wait_for_running proxy-switch-haproxy 1 + local 
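The apply_config/cat_config calls above pipe each test manifest through a chain of sed expressions that pin the images under test before the YAML reaches kubectl; the failed wait for app.kubernetes.io/instance=monitoring pods that follows is tolerated with "+ true", since no monitoring pods exist in this run. An abbreviated sketch of the substitution pipeline (expressions copied from the log; the full chain has several more image rules):

    # Abbreviated cat_config pipeline: rewrite image references to the PR
    # build and main-* tags, normalize the apiVersion, then apply.
    cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/conf/proxy-switch.yml \
        | /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-2476-a8b01a39#' \
        | /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' \
        | /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
        | kubectl apply -f -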
name=proxy-switch-haproxy + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod proxy-switch-haproxy-0 480 + local pod=proxy-switch-haproxy-0 + local max_retry=480 + local ns= ++ grep -E '^(pxc|proxysql)$' ++ echo proxy-switch-haproxy-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/proxy-switch-haproxy-0 condition met waiting for pod/proxy-switch-haproxy-0 to become Ready.Ok + wait_for_running proxy-switch-pxc 3 + local name=proxy-switch-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod proxy-switch-pxc-0 480 + local pod=proxy-switch-pxc-0 + local max_retry=480 + local ns= ++ echo proxy-switch-pxc-0 ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/proxy-switch-pxc-0 condition met E0516 19:57:33.191443 32042 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dproxy-switch-pxc-0&resourceVersion=1778961451395471012&timeoutSeconds=308&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/proxy-switch-pxc-0 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod proxy-switch-pxc-1 480 + local pod=proxy-switch-pxc-1 + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' ++ echo proxy-switch-pxc-1 + local container=pxc + set +o xtrace pod/proxy-switch-pxc-1 condition met waiting for pod/proxy-switch-pxc-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod proxy-switch-pxc-2 480 + local pod=proxy-switch-pxc-2 + local max_retry=480 + local ns= ++ echo proxy-switch-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/proxy-switch-pxc-2 condition met waiting for pod/proxy-switch-pxc-2 to become Ready.Ok + sleep 10 ++ kubectl get pxc proxy-switch -o 'jsonpath={.spec.secretsName}' + local secret_name=my-cluster-secrets ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.G5hIdmwUv2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.SCrfxVLfNP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.G5hIdmwUv2 ++ cat /tmp/tmp.SCrfxVLfNP ++ rm /tmp/tmp.G5hIdmwUv2 /tmp/tmp.SCrfxVLfNP ++ return 0 + local root_pass=root_password + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h proxy-switch-haproxy -uroot -p'\''root_password'\'' -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h proxy-switch-haproxy -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kTXs54rHfj +++ mktemp ++ local LAST_ERR=/tmp/tmp.xdC6pOYECx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kTXs54rHfj ++ cat /tmp/tmp.xdC6pOYECx ++ rm /tmp/tmp.kTXs54rHfj /tmp/tmp.xdC6pOYECx ++ return 0 + client_pod=pxc-client-67fc4995bb-wq5rx + wait_pod pxc-client-67fc4995bb-wq5rx + local pod=pxc-client-67fc4995bb-wq5rx + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' ++ echo pxc-client-67fc4995bb-wq5rx + local container= + set +o xtrace pod/pxc-client-67fc4995bb-wq5rx condition met E0516 20:00:28.528498 23040 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dpxc-client-67fc4995bb-wq5rx&resourceVersion=1778961625743997000&timeoutSeconds=516&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/pxc-client-67fc4995bb-wq5rx to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h proxy-switch-haproxy -uroot -p'\''root_password'\'' -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h proxy-switch-haproxy -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bQRQZUJkyP +++ mktemp ++ local LAST_ERR=/tmp/tmp.4sNS1WMTiZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bQRQZUJkyP ++ cat /tmp/tmp.4sNS1WMTiZ ++ rm /tmp/tmp.bQRQZUJkyP /tmp/tmp.4sNS1WMTiZ ++ return 0 + client_pod=pxc-client-67fc4995bb-wq5rx + wait_pod pxc-client-67fc4995bb-wq5rx + local pod=pxc-client-67fc4995bb-wq5rx + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' ++ echo pxc-client-67fc4995bb-wq5rx + local container= + set +o xtrace pod/pxc-client-67fc4995bb-wq5rx condition met E0516 20:00:40.531286 24895 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dpxc-client-67fc4995bb-wq5rx&resourceVersion=1778961638281362000&timeoutSeconds=578&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/pxc-client-67fc4995bb-wq5rx to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace 
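run_mysql above resolves the single pxc-client pod by label, waits for it, and execs the statement through the mysql client inside it. A minimal sketch of the assumed flow (the helper's exact body is not shown in the trace, so treat this as an approximation):

    # Assumed shape of run_mysql: locate the client pod, then pipe the SQL
    # through mysql inside it against the endpoint given in $uri
    # (e.g. "-h proxy-switch-haproxy -uroot -p'root_password' -P3306").
    run_mysql() {
        local command=$1
        local uri=$2
        local client_pod
        client_pod=$(kubectl get pods --selector=name=pxc-client \
            -o 'jsonpath={.items[].metadata.name}')
        kubectl exec "$client_pod" -- \
            bash -c "printf '%s\n' \"$command\" | mysql -sN $uri"
    }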
+ sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h proxy-switch-pxc-0.proxy-switch-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h proxy-switch-pxc-0.proxy-switch-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h proxy-switch-pxc-0.proxy-switch-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h proxy-switch-pxc-0.proxy-switch-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LVCaQwa0QJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.AWQFzPQuy9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LVCaQwa0QJ ++ cat /tmp/tmp.AWQFzPQuy9 ++ rm /tmp/tmp.LVCaQwa0QJ /tmp/tmp.AWQFzPQuy9 ++ return 0 + client_pod=pxc-client-67fc4995bb-wq5rx + wait_pod pxc-client-67fc4995bb-wq5rx + local pod=pxc-client-67fc4995bb-wq5rx + local max_retry=480 + local ns= ++ grep -E '^(pxc|proxysql)$' ++ echo pxc-client-67fc4995bb-wq5rx ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-67fc4995bb-wq5rx condition met E0516 20:01:21.336470 30763 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dpxc-client-67fc4995bb-wq5rx&resourceVersion=1778961678644092000&timeoutSeconds=393&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/pxc-client-67fc4995bb-wq5rx to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.he19Xv9u5M/select-1.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.he19Xv9u5M/select-1.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-1.sql /tmp/tmp.he19Xv9u5M/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h proxy-switch-pxc-1.proxy-switch-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h proxy-switch-pxc-1.proxy-switch-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h proxy-switch-pxc-1.proxy-switch-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h proxy-switch-pxc-1.proxy-switch-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yUAybrLDMM +++ mktemp ++ local LAST_ERR=/tmp/tmp.2AK5V4cLTE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yUAybrLDMM ++ cat /tmp/tmp.2AK5V4cLTE ++ rm /tmp/tmp.yUAybrLDMM /tmp/tmp.2AK5V4cLTE ++ return 0 + client_pod=pxc-client-67fc4995bb-wq5rx + wait_pod pxc-client-67fc4995bb-wq5rx + local pod=pxc-client-67fc4995bb-wq5rx + local max_retry=480 + local ns= ++ echo pxc-client-67fc4995bb-wq5rx ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-67fc4995bb-wq5rx condition met E0516 20:01:34.570156 387 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dpxc-client-67fc4995bb-wq5rx&resourceVersion=1778961692657442000&timeoutSeconds=445&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/pxc-client-67fc4995bb-wq5rx to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.he19Xv9u5M/select-1.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.he19Xv9u5M/select-1.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-1.sql /tmp/tmp.he19Xv9u5M/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h proxy-switch-pxc-2.proxy-switch-pxc -uroot -p'\''root_password'\'' -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h proxy-switch-pxc-2.proxy-switch-pxc -uroot -p'\''root_password'\'' -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h proxy-switch-pxc-2.proxy-switch-pxc -uroot -p'\''root_password'\'' -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h proxy-switch-pxc-2.proxy-switch-pxc -uroot -p'\''root_password'\'' -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sEGDx7rRcS +++ mktemp ++ local LAST_ERR=/tmp/tmp.ehVSBEYk15 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sEGDx7rRcS ++ cat /tmp/tmp.ehVSBEYk15 ++ rm /tmp/tmp.sEGDx7rRcS /tmp/tmp.ehVSBEYk15 ++ return 0 + client_pod=pxc-client-67fc4995bb-wq5rx + wait_pod pxc-client-67fc4995bb-wq5rx + local pod=pxc-client-67fc4995bb-wq5rx + local max_retry=480 + local ns= ++ grep -E '^(pxc|proxysql)$' ++ echo pxc-client-67fc4995bb-wq5rx ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-67fc4995bb-wq5rx condition met E0516 20:01:49.184954 2624 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dpxc-client-67fc4995bb-wq5rx&resourceVersion=1778961706641639000&timeoutSeconds=355&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/pxc-client-67fc4995bb-wq5rx to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
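Each compare_mysql_cmd pass above follows the same verification recipe: write the query result to a file under the test's temp directory, check it is non-empty and free of "Unknown MySQL server host" errors, then diff it against a golden file, preferring a version-specific variant (select-1-80.sql) when one exists. A sketch reconstructed from the trace (tmp_dir and test_dir are stand-in names for the paths shown above):

    # Reconstruction of the comparison logic; an empty diff means the row
    # written earlier (id 100500) is visible on the node being queried.
    expected_result=$test_dir/compare/select-1.sql
    if [[ -f $test_dir/compare/select-1-80.sql ]]; then
        expected_result=$test_dir/compare/select-1-80.sql    # not present in this run
    fi
    run_mysql 'SELECT * from myApp.myApp;' "$uri" >"$tmp_dir/select-1.sql"
    [[ -s $tmp_dir/select-1.sql ]]
    ! grep 'Unknown MySQL server host' "$tmp_dir/select-1.sql"
    diff -u "$expected_result" "$tmp_dir/select-1.sql"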
-s /tmp/tmp.he19Xv9u5M/select-1.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.he19Xv9u5M/select-1.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-1.sql /tmp/tmp.he19Xv9u5M/select-1.sql + is_keyring_plugin_in_use proxy-switch + local cluster=proxy-switch + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + grep -E -o 'early-plugin-load=keyring_\w+.so' + kubectl exec proxy-switch-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' + return 1 + desc 'check cluster is ready with HAProxy' + set +o xtrace ----------------------------------------------------------------------------------- check cluster is ready with HAProxy ----------------------------------------------------------------------------------- + wait_for_running proxy-switch-pxc 3 + local name=proxy-switch-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod proxy-switch-pxc-0 480 + local pod=proxy-switch-pxc-0 + local max_retry=480 + local ns= ++ grep -E '^(pxc|proxysql)$' ++ echo proxy-switch-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/proxy-switch-pxc-0 condition met E0516 20:02:05.013267 4902 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dproxy-switch-pxc-0&resourceVersion=1778961721641911000&timeoutSeconds=580&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/proxy-switch-pxc-0 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod proxy-switch-pxc-1 480 + local pod=proxy-switch-pxc-1 + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo proxy-switch-pxc-1 ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/proxy-switch-pxc-1 condition met E0516 20:02:12.523400 6095 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dproxy-switch-pxc-1&resourceVersion=1778961731642691000&timeoutSeconds=501&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/proxy-switch-pxc-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod proxy-switch-pxc-2 480 + local pod=proxy-switch-pxc-2 + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' ++ echo proxy-switch-pxc-2 + local container=pxc + set +o xtrace pod/proxy-switch-pxc-2 condition met E0516 20:02:23.630887 7491 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dproxy-switch-pxc-2&resourceVersion=1778961741642813000&timeoutSeconds=315&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/proxy-switch-pxc-2 to become Ready.Ok + wait_for_running proxy-switch-haproxy 3 + local name=proxy-switch-haproxy + let 
last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod proxy-switch-haproxy-0 480 + local pod=proxy-switch-haproxy-0 + local max_retry=480 + local ns= ++ echo proxy-switch-haproxy-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/proxy-switch-haproxy-0 condition met E0516 20:02:35.336425 9169 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dproxy-switch-haproxy-0&resourceVersion=1778961751642870000&timeoutSeconds=346&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/proxy-switch-haproxy-0 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod proxy-switch-haproxy-1 480 + local pod=proxy-switch-haproxy-1 + local max_retry=480 + local ns= ++ grep -E '^(pxc|proxysql)$' ++ echo proxy-switch-haproxy-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/proxy-switch-haproxy-1 condition met E0516 20:02:46.300933 10856 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dproxy-switch-haproxy-1&resourceVersion=1778961761643085000&timeoutSeconds=347&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/proxy-switch-haproxy-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod proxy-switch-haproxy-2 480 + local pod=proxy-switch-haproxy-2 + local max_retry=480 + local ns= ++ echo proxy-switch-haproxy-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/proxy-switch-haproxy-2 condition met E0516 20:02:58.433333 12468 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dproxy-switch-haproxy-2&resourceVersion=1778961776643186000&timeoutSeconds=451&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/proxy-switch-haproxy-2 to become Ready.Ok + wait_cluster_consistency proxy-switch 3 3 + local cluster_name=proxy-switch + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/proxy-switch to be ready' waiting for pxc/proxy-switch to be ready++ kubectl_bin get pxc proxy-switch -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.e2Sczr4o4g +++ mktemp ++ local LAST_ERR=/tmp/tmp.2SpzKVeFr0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc proxy-switch -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.e2Sczr4o4g ++ cat 
/tmp/tmp.2SpzKVeFr0 ++ rm /tmp/tmp.e2Sczr4o4g /tmp/tmp.2SpzKVeFr0 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc proxy-switch -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xC73xcw3vt +++ mktemp ++ local LAST_ERR=/tmp/tmp.3Xf5qAp8t4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc proxy-switch -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xC73xcw3vt ++ cat /tmp/tmp.3Xf5qAp8t4 ++ rm /tmp/tmp.xC73xcw3vt /tmp/tmp.3Xf5qAp8t4 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine proxy-switch +++ local cluster_name=proxy-switch ++++ get_proxy proxy-switch ++++ local target_cluster=proxy-switch +++++ kubectl_bin get pxc proxy-switch -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.PcHaLF5G8F ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.KCtT1Oqhk4 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc proxy-switch -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.PcHaLF5G8F +++++ cat /tmp/tmp.KCtT1Oqhk4 +++++ rm /tmp/tmp.PcHaLF5G8F /tmp/tmp.KCtT1Oqhk4 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo proxy-switch-haproxy ++++ return +++ local cluster_proxy=proxy-switch-haproxy +++ echo haproxy ++ kubectl_bin get pxc proxy-switch -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HWfSoTKPKV +++ mktemp ++ local LAST_ERR=/tmp/tmp.29rVJMTstx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc proxy-switch -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HWfSoTKPKV ++ cat /tmp/tmp.29rVJMTstx ++ rm /tmp/tmp.HWfSoTKPKV /tmp/tmp.29rVJMTstx ++ return 0 + [[ 3 == \3 ]] + echo + desc 'write data and check connectivity through HAProxy' + set +o xtrace ----------------------------------------------------------------------------------- write data and check connectivity through HAProxy ----------------------------------------------------------------------------------- + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY);' '-h proxy-switch-haproxy -uroot -proot_password' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY);' + local 'uri=-h proxy-switch-haproxy -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rH0oV0yxUV +++ mktemp ++ local LAST_ERR=/tmp/tmp.61A4O926dP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rH0oV0yxUV ++ cat /tmp/tmp.61A4O926dP ++ rm /tmp/tmp.rH0oV0yxUV /tmp/tmp.61A4O926dP ++ return 0 + client_pod=pxc-client-67fc4995bb-wq5rx + wait_pod pxc-client-67fc4995bb-wq5rx + local pod=pxc-client-67fc4995bb-wq5rx + local max_retry=480 + local ns= ++ grep -E '^(pxc|proxysql)$' ++ echo pxc-client-67fc4995bb-wq5rx ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-67fc4995bb-wq5rx condition met E0516 20:03:23.421732 16484 reflector.go:227] "Failed to watch" err="Get 
\"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dpxc-client-67fc4995bb-wq5rx&resourceVersion=1778961802403818000&timeoutSeconds=329&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/pxc-client-67fc4995bb-wq5rx to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h proxy-switch-haproxy -uroot -proot_password' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h proxy-switch-haproxy -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Jh78WKGl0h +++ mktemp ++ local LAST_ERR=/tmp/tmp.iWRQPiixd3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Jh78WKGl0h ++ cat /tmp/tmp.iWRQPiixd3 ++ rm /tmp/tmp.Jh78WKGl0h /tmp/tmp.iWRQPiixd3 ++ return 0 + client_pod=pxc-client-67fc4995bb-wq5rx + wait_pod pxc-client-67fc4995bb-wq5rx + local pod=pxc-client-67fc4995bb-wq5rx + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' ++ echo pxc-client-67fc4995bb-wq5rx + local container= + set +o xtrace pod/pxc-client-67fc4995bb-wq5rx condition met E0516 20:03:35.445679 18028 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dpxc-client-67fc4995bb-wq5rx&resourceVersion=1778961813130674000&timeoutSeconds=495&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/pxc-client-67fc4995bb-wq5rx to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace ERROR 1062 (23000) at line 1: Duplicate entry '100500' for key 'myApp.PRIMARY' command terminated with exit code 1 + sleep 10 + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h proxy-switch-haproxy -uroot -proot_password' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h proxy-switch-haproxy -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h proxy-switch-haproxy -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h proxy-switch-haproxy -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GcADYm2w6z +++ mktemp ++ local LAST_ERR=/tmp/tmp.Lj0jkYC2AM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ 
break ++ cat /tmp/tmp.GcADYm2w6z ++ cat /tmp/tmp.Lj0jkYC2AM ++ rm /tmp/tmp.GcADYm2w6z /tmp/tmp.Lj0jkYC2AM ++ return 0 + client_pod=pxc-client-67fc4995bb-wq5rx + wait_pod pxc-client-67fc4995bb-wq5rx + local pod=pxc-client-67fc4995bb-wq5rx + local max_retry=480 + local ns= ++ echo pxc-client-67fc4995bb-wq5rx ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-67fc4995bb-wq5rx condition met E0516 20:04:03.936399 22241 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dpxc-client-67fc4995bb-wq5rx&resourceVersion=1778961841805672000&timeoutSeconds=404&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/pxc-client-67fc4995bb-wq5rx to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.he19Xv9u5M/select-1.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.he19Xv9u5M/select-1.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-1.sql /tmp/tmp.he19Xv9u5M/select-1.sql + desc 'verify data exists on all PXC nodes' + set +o xtrace ----------------------------------------------------------------------------------- verify data exists on all PXC nodes ----------------------------------------------------------------------------------- + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h proxy-switch-pxc-0.proxy-switch-pxc -uroot -proot_password' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h proxy-switch-pxc-0.proxy-switch-pxc -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h proxy-switch-pxc-0.proxy-switch-pxc -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h proxy-switch-pxc-0.proxy-switch-pxc -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RCuyXVrysm +++ mktemp ++ local LAST_ERR=/tmp/tmp.5MKqIYynGy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RCuyXVrysm ++ cat /tmp/tmp.5MKqIYynGy ++ rm /tmp/tmp.RCuyXVrysm /tmp/tmp.5MKqIYynGy ++ return 0 + client_pod=pxc-client-67fc4995bb-wq5rx + wait_pod pxc-client-67fc4995bb-wq5rx + local pod=pxc-client-67fc4995bb-wq5rx + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' ++ echo pxc-client-67fc4995bb-wq5rx + local container= + set +o xtrace pod/pxc-client-67fc4995bb-wq5rx condition met E0516 20:04:17.188845 24050 reflector.go:227] "Failed to watch" err="Get 
\"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dpxc-client-67fc4995bb-wq5rx&resourceVersion=1778961853839305000&timeoutSeconds=442&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/pxc-client-67fc4995bb-wq5rx to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.he19Xv9u5M/select-1.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.he19Xv9u5M/select-1.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-1.sql /tmp/tmp.he19Xv9u5M/select-1.sql + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h proxy-switch-pxc-1.proxy-switch-pxc -uroot -proot_password' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h proxy-switch-pxc-1.proxy-switch-pxc -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h proxy-switch-pxc-1.proxy-switch-pxc -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h proxy-switch-pxc-1.proxy-switch-pxc -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.V0ASc80LCd +++ mktemp ++ local LAST_ERR=/tmp/tmp.XVRm1MliES ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.V0ASc80LCd ++ cat /tmp/tmp.XVRm1MliES ++ rm /tmp/tmp.V0ASc80LCd /tmp/tmp.XVRm1MliES ++ return 0 + client_pod=pxc-client-67fc4995bb-wq5rx + wait_pod pxc-client-67fc4995bb-wq5rx + local pod=pxc-client-67fc4995bb-wq5rx + local max_retry=480 + local ns= ++ grep -E '^(pxc|proxysql)$' ++ echo pxc-client-67fc4995bb-wq5rx ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-67fc4995bb-wq5rx condition met E0516 20:04:33.348103 26300 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dpxc-client-67fc4995bb-wq5rx&resourceVersion=1778961869443255000&timeoutSeconds=528&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/pxc-client-67fc4995bb-wq5rx to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.he19Xv9u5M/select-1.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.he19Xv9u5M/select-1.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-1.sql /tmp/tmp.he19Xv9u5M/select-1.sql + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h proxy-switch-pxc-2.proxy-switch-pxc -uroot -proot_password' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h proxy-switch-pxc-2.proxy-switch-pxc -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h proxy-switch-pxc-2.proxy-switch-pxc -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h proxy-switch-pxc-2.proxy-switch-pxc -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UjNJltaIdt +++ mktemp ++ local LAST_ERR=/tmp/tmp.jmuEGrBqM0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UjNJltaIdt ++ cat /tmp/tmp.jmuEGrBqM0 ++ rm /tmp/tmp.UjNJltaIdt /tmp/tmp.jmuEGrBqM0 ++ return 0 + client_pod=pxc-client-67fc4995bb-wq5rx + wait_pod pxc-client-67fc4995bb-wq5rx + local pod=pxc-client-67fc4995bb-wq5rx + local max_retry=480 + local ns= ++ echo pxc-client-67fc4995bb-wq5rx ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-67fc4995bb-wq5rx condition met E0516 20:04:49.815967 28837 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dpxc-client-67fc4995bb-wq5rx&resourceVersion=1778961887126430000&timeoutSeconds=420&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/pxc-client-67fc4995bb-wq5rx to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
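# The echo | sed | grep pipeline that precedes every wait_pod call is
# container detection: strip everything up to "-pxc-<n>"/"-proxysql-<n>"
# and keep the result only if the whole remainder is exactly pxc or
# proxysql. For the client pod neither matches, so container stays empty
# and kubectl falls back to the pod default ("Defaulted container ...").
detect_container() {
    echo "$1" | /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' \
              | grep -E '^(pxc|proxysql)$'
}
# detect_container proxy-switch-pxc-0          -> pxc
# detect_container proxy-switch-proxysql-1     -> proxysql
# detect_container pxc-client-67fc4995bb-wq5rx -> (no output, exit 1)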
-s /tmp/tmp.he19Xv9u5M/select-1.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.he19Xv9u5M/select-1.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-1.sql /tmp/tmp.he19Xv9u5M/select-1.sql + desc 'switch from HAProxy to ProxySQL' + set +o xtrace ----------------------------------------------------------------------------------- switch from HAProxy to ProxySQL ----------------------------------------------------------------------------------- + kubectl_bin patch pxc proxy-switch --type=json -p '[ {"op": "replace", "path": "/spec/haproxy/enabled", "value": false}, {"op": "replace", "path": "/spec/proxysql/enabled", "value": true} ]' ++ mktemp + local LAST_OUT=/tmp/tmp.UtA1bb08wY ++ mktemp + local LAST_ERR=/tmp/tmp.puWFLyRZoB + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc proxy-switch --type=json -p '[ {"op": "replace", "path": "/spec/haproxy/enabled", "value": false}, {"op": "replace", "path": "/spec/proxysql/enabled", "value": true} ]' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.UtA1bb08wY perconaxtradbcluster.pxc.percona.com/proxy-switch patched + cat /tmp/tmp.puWFLyRZoB + rm /tmp/tmp.UtA1bb08wY /tmp/tmp.puWFLyRZoB + return 0 + desc 'wait for ProxySQL to be ready and HAProxy to be removed' + set +o xtrace ----------------------------------------------------------------------------------- wait for ProxySQL to be ready and HAProxy to be removed ----------------------------------------------------------------------------------- + wait_for_delete sts/proxy-switch-haproxy + local res=sts/proxy-switch-haproxy + echo -n 'waiting for sts/proxy-switch-haproxy to be deleted' waiting for sts/proxy-switch-haproxy to be deleted+ set +o xtrace Error from server (NotFound): statefulsets.apps "proxy-switch-haproxy" not found + wait_for_running proxy-switch-proxysql 3 + local name=proxy-switch-proxysql + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod proxy-switch-proxysql-0 480 + local pod=proxy-switch-proxysql-0 + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' ++ echo proxy-switch-proxysql-0 + local container=proxysql + set +o xtrace pod/proxy-switch-proxysql-0 condition met waiting for pod/proxy-switch-proxysql-0 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod proxy-switch-proxysql-1 480 + local pod=proxy-switch-proxysql-1 + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo proxy-switch-proxysql-1 ++ grep -E '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/proxy-switch-proxysql-1 condition met waiting for pod/proxy-switch-proxysql-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod proxy-switch-proxysql-2 480 + local pod=proxy-switch-proxysql-2 + local max_retry=480 + local ns= ++ echo proxy-switch-proxysql-2 ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=proxysql + set +o xtrace pod/proxy-switch-proxysql-2 condition met waiting for pod/proxy-switch-proxysql-2 to become Ready.Ok + wait_cluster_consistency proxy-switch 3 3 + local cluster_name=proxy-switch + local cluster_size=3 + 
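# The entire HAProxy -> ProxySQL switch is the single JSON patch above:
# the operator reacts by deleting the proxy-switch-haproxy StatefulSet
# and rolling out proxy-switch-proxysql. The wait helpers, sketched from
# the trace (polling details are assumptions):
wait_for_delete_sketch() {            # e.g. sts/proxy-switch-haproxy
    local res=$1
    echo -n "waiting for $res to be deleted"
    while kubectl get "$res" >/dev/null 2>&1; do echo -n .; sleep 1; done
    echo
}
wait_for_running_sketch() {           # e.g. proxy-switch-proxysql 3
    local name=$1 replicas=$2
    for ((i = 0; i < replicas; i++)); do
        kubectl wait --for=condition=Ready "pod/$name-$i" --timeout=480s
    done
}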
local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/proxy-switch to be ready' waiting for pxc/proxy-switch to be ready++ kubectl_bin get pxc proxy-switch -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MfzaWS4sCh +++ mktemp ++ local LAST_ERR=/tmp/tmp.7rgyi0YLZF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc proxy-switch -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MfzaWS4sCh ++ cat /tmp/tmp.7rgyi0YLZF ++ rm /tmp/tmp.MfzaWS4sCh /tmp/tmp.7rgyi0YLZF ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc proxy-switch -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5IH0jJ1a5I +++ mktemp ++ local LAST_ERR=/tmp/tmp.Xef3PJMh9O ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc proxy-switch -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5IH0jJ1a5I ++ cat /tmp/tmp.Xef3PJMh9O ++ rm /tmp/tmp.5IH0jJ1a5I /tmp/tmp.Xef3PJMh9O ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine proxy-switch +++ local cluster_name=proxy-switch ++++ get_proxy proxy-switch ++++ local target_cluster=proxy-switch +++++ kubectl_bin get pxc proxy-switch -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.AqVlqnGFJ3 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.SJ96j54nZe +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc proxy-switch -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.AqVlqnGFJ3 +++++ cat /tmp/tmp.SJ96j54nZe +++++ rm /tmp/tmp.AqVlqnGFJ3 /tmp/tmp.SJ96j54nZe +++++ return 0 ++++ [[ false == \t\r\u\e ]] +++++ kubectl_bin get pxc proxy-switch -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.9GERgOUlFb ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.vuVRa4DW9x +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc proxy-switch -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.9GERgOUlFb +++++ cat /tmp/tmp.vuVRa4DW9x +++++ rm /tmp/tmp.9GERgOUlFb /tmp/tmp.vuVRa4DW9x +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo proxy-switch-proxysql ++++ return +++ local cluster_proxy=proxy-switch-proxysql +++ echo proxysql ++ kubectl_bin get pxc proxy-switch -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zMsha99UMF +++ mktemp ++ local LAST_ERR=/tmp/tmp.GbD2WYA3yg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc proxy-switch -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zMsha99UMF ++ cat /tmp/tmp.GbD2WYA3yg ++ rm /tmp/tmp.zMsha99UMF /tmp/tmp.GbD2WYA3yg ++ return 0 + [[ 3 == \3 ]] + echo + desc 'verify HAProxy pods are deleted' + set +o xtrace ----------------------------------------------------------------------------------- verify HAProxy pods are deleted ----------------------------------------------------------------------------------- + kubectl_bin get pods -l 
app.kubernetes.io/name=percona-xtradb-cluster,app.kubernetes.io/instance=proxy-switch,app.kubernetes.io/component=haproxy + grep -q haproxy + desc 'verify ProxySQL service exists' + set +o xtrace ----------------------------------------------------------------------------------- verify ProxySQL service exists ----------------------------------------------------------------------------------- + kubectl_bin get service proxy-switch-proxysql ++ mktemp + local LAST_OUT=/tmp/tmp.7i2WyYDVWf ++ mktemp + local LAST_ERR=/tmp/tmp.huT0SNUovp + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get service proxy-switch-proxysql + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.7i2WyYDVWf NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE proxy-switch-proxysql ClusterIP 34.118.236.68 <none> 3306/TCP,33062/TCP,6070/TCP 84s + cat /tmp/tmp.huT0SNUovp + rm /tmp/tmp.7i2WyYDVWf /tmp/tmp.huT0SNUovp + return 0 + desc 'check connectivity through ProxySQL after switch' + set +o xtrace ----------------------------------------------------------------------------------- check connectivity through ProxySQL after switch ----------------------------------------------------------------------------------- + sleep 20 + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h proxy-switch-proxysql -uroot -proot_password' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h proxy-switch-proxysql -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-1-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h proxy-switch-proxysql -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h proxy-switch-proxysql -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Qc2aF9rzLQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.7yYoHrFHKi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Qc2aF9rzLQ ++ cat /tmp/tmp.7yYoHrFHKi ++ rm /tmp/tmp.Qc2aF9rzLQ /tmp/tmp.7yYoHrFHKi ++ return 0 + client_pod=pxc-client-67fc4995bb-wq5rx + wait_pod pxc-client-67fc4995bb-wq5rx + local pod=pxc-client-67fc4995bb-wq5rx + local max_retry=480 + local ns= ++ echo pxc-client-67fc4995bb-wq5rx ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-67fc4995bb-wq5rx condition met E0516 20:06:50.639073 13004 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dpxc-client-67fc4995bb-wq5rx&resourceVersion=1778962008609733000&timeoutSeconds=315&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/pxc-client-67fc4995bb-wq5rx to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o
xtrace + [[ ! -s /tmp/tmp.he19Xv9u5M/select-1.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.he19Xv9u5M/select-1.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-1.sql /tmp/tmp.he19Xv9u5M/select-1.sql + desc 'write new data through ProxySQL' + set +o xtrace ----------------------------------------------------------------------------------- write new data through ProxySQL ----------------------------------------------------------------------------------- + run_mysql 'INSERT myApp.myApp (id) VALUES (100501)' '-h proxy-switch-proxysql -uroot -proot_password' + local 'command=INSERT myApp.myApp (id) VALUES (100501)' + local 'uri=-h proxy-switch-proxysql -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.J78l0zzqUQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.IRTFG3SNsh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.J78l0zzqUQ ++ cat /tmp/tmp.IRTFG3SNsh ++ rm /tmp/tmp.J78l0zzqUQ /tmp/tmp.IRTFG3SNsh ++ return 0 + client_pod=pxc-client-67fc4995bb-wq5rx + wait_pod pxc-client-67fc4995bb-wq5rx + local pod=pxc-client-67fc4995bb-wq5rx + local max_retry=480 + local ns= ++ echo pxc-client-67fc4995bb-wq5rx ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-67fc4995bb-wq5rx condition met E0516 20:06:58.358274 14142 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dpxc-client-67fc4995bb-wq5rx&resourceVersion=1778962016893423000&timeoutSeconds=317&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/pxc-client-67fc4995bb-wq5rx to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 10 + compare_mysql_cmd select-2 'SELECT * from myApp.myApp;' '-h proxy-switch-proxysql -uroot -proot_password' + local command_id=select-2 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h proxy-switch-proxysql -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-2-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h proxy-switch-proxysql -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h proxy-switch-proxysql -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cBi8tY55aB +++ mktemp ++ local LAST_ERR=/tmp/tmp.mYN8kqjnCC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cBi8tY55aB ++ 
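# The "verify HAProxy pods are deleted" step above asserts absence: list
# pods by their component label and require that nothing matches. The
# if/exit wrapper is not visible in the trace, so this exact shape is an
# assumption:
if kubectl get pods \
    -l app.kubernetes.io/instance=proxy-switch,app.kubernetes.io/component=haproxy \
    2>/dev/null | grep -q haproxy; then
    echo 'HAProxy pods still present after the switch' >&2
    exit 1
fi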
cat /tmp/tmp.mYN8kqjnCC ++ rm /tmp/tmp.cBi8tY55aB /tmp/tmp.mYN8kqjnCC ++ return 0 + client_pod=pxc-client-67fc4995bb-wq5rx + wait_pod pxc-client-67fc4995bb-wq5rx + local pod=pxc-client-67fc4995bb-wq5rx + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' ++ echo pxc-client-67fc4995bb-wq5rx + local container= + set +o xtrace pod/pxc-client-67fc4995bb-wq5rx condition met E0516 20:07:14.859534 16394 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dpxc-client-67fc4995bb-wq5rx&resourceVersion=1778962034007483000&timeoutSeconds=493&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/pxc-client-67fc4995bb-wq5rx to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.he19Xv9u5M/select-2.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.he19Xv9u5M/select-2.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-2.sql /tmp/tmp.he19Xv9u5M/select-2.sql + desc 'verify new data exists on all PXC nodes' + set +o xtrace ----------------------------------------------------------------------------------- verify new data exists on all PXC nodes ----------------------------------------------------------------------------------- + compare_mysql_cmd select-2 'SELECT * from myApp.myApp;' '-h proxy-switch-pxc-0.proxy-switch-pxc -uroot -proot_password' + local command_id=select-2 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h proxy-switch-pxc-0.proxy-switch-pxc -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-2-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h proxy-switch-pxc-0.proxy-switch-pxc -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h proxy-switch-pxc-0.proxy-switch-pxc -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Y9khPtBe6u +++ mktemp ++ local LAST_ERR=/tmp/tmp.0JqBSjSrSe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Y9khPtBe6u ++ cat /tmp/tmp.0JqBSjSrSe ++ rm /tmp/tmp.Y9khPtBe6u /tmp/tmp.0JqBSjSrSe ++ return 0 + client_pod=pxc-client-67fc4995bb-wq5rx + wait_pod pxc-client-67fc4995bb-wq5rx + local pod=pxc-client-67fc4995bb-wq5rx + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo pxc-client-67fc4995bb-wq5rx ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-67fc4995bb-wq5rx condition met E0516 20:07:25.170790 17713 reflector.go:227] "Failed to watch" err="Get 
\"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dpxc-client-67fc4995bb-wq5rx&resourceVersion=1778962042857068000&timeoutSeconds=512&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/pxc-client-67fc4995bb-wq5rx to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! -s /tmp/tmp.he19Xv9u5M/select-2.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.he19Xv9u5M/select-2.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-2.sql /tmp/tmp.he19Xv9u5M/select-2.sql + compare_mysql_cmd select-2 'SELECT * from myApp.myApp;' '-h proxy-switch-pxc-1.proxy-switch-pxc -uroot -proot_password' + local command_id=select-2 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h proxy-switch-pxc-1.proxy-switch-pxc -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-2-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h proxy-switch-pxc-1.proxy-switch-pxc -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h proxy-switch-pxc-1.proxy-switch-pxc -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UBDKCBITGS +++ mktemp ++ local LAST_ERR=/tmp/tmp.9gn6BsPXwv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UBDKCBITGS ++ cat /tmp/tmp.9gn6BsPXwv ++ rm /tmp/tmp.UBDKCBITGS /tmp/tmp.9gn6BsPXwv ++ return 0 + client_pod=pxc-client-67fc4995bb-wq5rx + wait_pod pxc-client-67fc4995bb-wq5rx + local pod=pxc-client-67fc4995bb-wq5rx + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' ++ echo pxc-client-67fc4995bb-wq5rx + local container= + set +o xtrace pod/pxc-client-67fc4995bb-wq5rx condition met E0516 20:07:35.880700 19229 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dpxc-client-67fc4995bb-wq5rx&resourceVersion=1778962053413204000&timeoutSeconds=445&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/pxc-client-67fc4995bb-wq5rx to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
-s /tmp/tmp.he19Xv9u5M/select-2.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.he19Xv9u5M/select-2.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-2.sql /tmp/tmp.he19Xv9u5M/select-2.sql + compare_mysql_cmd select-2 'SELECT * from myApp.myApp;' '-h proxy-switch-pxc-2.proxy-switch-pxc -uroot -proot_password' + local command_id=select-2 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h proxy-switch-pxc-2.proxy-switch-pxc -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-2-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h proxy-switch-pxc-2.proxy-switch-pxc -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h proxy-switch-pxc-2.proxy-switch-pxc -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tdPfj2TXEz +++ mktemp ++ local LAST_ERR=/tmp/tmp.wZNR1QEU8b ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tdPfj2TXEz ++ cat /tmp/tmp.wZNR1QEU8b ++ rm /tmp/tmp.tdPfj2TXEz /tmp/tmp.wZNR1QEU8b ++ return 0 + client_pod=pxc-client-67fc4995bb-wq5rx + wait_pod pxc-client-67fc4995bb-wq5rx + local pod=pxc-client-67fc4995bb-wq5rx + local max_retry=480 + local ns= ++ echo pxc-client-67fc4995bb-wq5rx ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-67fc4995bb-wq5rx condition met E0516 20:07:44.965529 20598 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dpxc-client-67fc4995bb-wq5rx&resourceVersion=1778962063019332000&timeoutSeconds=484&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/pxc-client-67fc4995bb-wq5rx to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
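# The per-node select-2 verifications around this point differ only in
# the pod ordinal; they condense to the loop below. The pod-N.service
# names resolve through the headless proxy-switch-pxc service, so these
# reads bypass the proxy layer entirely:
for i in 0 1 2; do
    compare_mysql_cmd select-2 'SELECT * from myApp.myApp;' \
        "-h proxy-switch-pxc-$i.proxy-switch-pxc -uroot -proot_password"
done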
-s /tmp/tmp.he19Xv9u5M/select-2.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.he19Xv9u5M/select-2.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-2.sql /tmp/tmp.he19Xv9u5M/select-2.sql + desc 'verify ProxySQL is routing to primary' + set +o xtrace ----------------------------------------------------------------------------------- verify ProxySQL is routing to primary ----------------------------------------------------------------------------------- ++ get_proxy_primary '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' proxy-switch-proxysql-0 ++ local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' ++ local pod=proxy-switch-proxysql-0 +++ run_mysql_local 'SELECT hostname FROM runtime_mysql_servers WHERE hostgroup_id=11 AND status='\''ONLINE'\'';' '-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' proxy-switch-proxysql-0 proxysql +++ local 'command=SELECT hostname FROM runtime_mysql_servers WHERE hostgroup_id=11 AND status='\''ONLINE'\'';' +++ local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -padmin_password' +++ local pod=proxy-switch-proxysql-0 +++ local container_name=proxysql +++ set +o xtrace ++ local ip=proxy-switch-pxc-0.proxy-switch-pxc.proxy-switch-28565.svc.cluster.local +++ echo proxy-switch-pxc-0.proxy-switch-pxc.proxy-switch-28565.svc.cluster.local +++ wc -l ++ '[' 1 '!=' 1 ']' ++ echo proxy-switch-pxc-0.proxy-switch-pxc.proxy-switch-28565.svc.cluster.local ++ cut -d. -f1 + initial_primary=proxy-switch-pxc-0 + '[' -z proxy-switch-pxc-0 ']' + echo 'ProxySQL is routing to primary: proxy-switch-pxc-0' ProxySQL is routing to primary: proxy-switch-pxc-0 + desc 'switch back to HAProxy from ProxySQL' + set +o xtrace ----------------------------------------------------------------------------------- switch back to HAProxy from ProxySQL ----------------------------------------------------------------------------------- + kubectl_bin patch pxc proxy-switch --type=json -p '[ {"op": "replace", "path": "/spec/haproxy/enabled", "value": true}, {"op": "replace", "path": "/spec/proxysql/enabled", "value": false} ]' ++ mktemp + local LAST_OUT=/tmp/tmp.VX3yaBYbze ++ mktemp + local LAST_ERR=/tmp/tmp.kDELgt8Z1k + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc proxy-switch --type=json -p '[ {"op": "replace", "path": "/spec/haproxy/enabled", "value": true}, {"op": "replace", "path": "/spec/proxysql/enabled", "value": false} ]' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.VX3yaBYbze perconaxtradbcluster.pxc.percona.com/proxy-switch patched + cat /tmp/tmp.kDELgt8Z1k + rm /tmp/tmp.VX3yaBYbze /tmp/tmp.kDELgt8Z1k + return 0 + desc 'wait for HAProxy to be ready and ProxySQL to be removed' + set +o xtrace ----------------------------------------------------------------------------------- wait for HAProxy to be ready and ProxySQL to be removed ----------------------------------------------------------------------------------- + wait_for_delete sts/proxy-switch-proxysql + local res=sts/proxy-switch-proxysql + echo -n 'waiting for sts/proxy-switch-proxysql to be deleted' waiting for sts/proxy-switch-proxysql to be deleted+ set +o xtrace .Error from server (NotFound): statefulsets.apps "proxy-switch-proxysql" not found + wait_for_running proxy-switch-haproxy 3 + local name=proxy-switch-haproxy + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace 
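# Primary detection (the get_proxy_primary step above): query the
# ProxySQL admin interface on port 6032 in pod 0 for the ONLINE server in
# the writer hostgroup (hostgroup_id=11 in this deployment), then trim
# the FQDN to the pod name. A sketch; the exec form is an assumption
# because run_mysql_local hides it behind 'set +o xtrace':
get_proxy_primary_sketch() {
    local pod=${1:-proxy-switch-proxysql-0} ip
    ip=$(kubectl exec "$pod" -c proxysql -- \
        mysql -sN -h127.0.0.1 -P6032 -uproxyadmin -padmin_password \
        -e "SELECT hostname FROM runtime_mysql_servers WHERE hostgroup_id=11 AND status='ONLINE';")
    [ "$(echo "$ip" | wc -l)" != 1 ] && return 1   # expect exactly one writer
    echo "$ip" | cut -d. -f1
}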
----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod proxy-switch-haproxy-0 480 + local pod=proxy-switch-haproxy-0 + local max_retry=480 + local ns= ++ echo proxy-switch-haproxy-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/proxy-switch-haproxy-0 condition met waiting for pod/proxy-switch-haproxy-0 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod proxy-switch-haproxy-1 480 + local pod=proxy-switch-haproxy-1 + local max_retry=480 + local ns= ++ echo proxy-switch-haproxy-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/proxy-switch-haproxy-1 condition met waiting for pod/proxy-switch-haproxy-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod proxy-switch-haproxy-2 480 + local pod=proxy-switch-haproxy-2 + local max_retry=480 + local ns= ++ grep -E '^(pxc|proxysql)$' ++ echo proxy-switch-haproxy-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/proxy-switch-haproxy-2 condition met waiting for pod/proxy-switch-haproxy-2 to become Ready.Ok + wait_cluster_consistency proxy-switch 3 3 + local cluster_name=proxy-switch + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 'waiting for pxc/proxy-switch to be ready' waiting for pxc/proxy-switch to be ready++ kubectl_bin get pxc proxy-switch -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.n9FXrASB0A +++ mktemp ++ local LAST_ERR=/tmp/tmp.h2guUo9tzX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc proxy-switch -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.n9FXrASB0A ++ cat /tmp/tmp.h2guUo9tzX ++ rm /tmp/tmp.n9FXrASB0A /tmp/tmp.h2guUo9tzX ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc proxy-switch -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FcPeVqCVXs +++ mktemp ++ local LAST_ERR=/tmp/tmp.I2rJbLIfri ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc proxy-switch -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FcPeVqCVXs ++ cat /tmp/tmp.I2rJbLIfri ++ rm /tmp/tmp.FcPeVqCVXs /tmp/tmp.I2rJbLIfri ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine proxy-switch +++ local cluster_name=proxy-switch ++++ get_proxy proxy-switch ++++ local target_cluster=proxy-switch +++++ kubectl_bin get pxc proxy-switch -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.vxbOuCxYfa ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.2tFuEG2RBW +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc proxy-switch -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.vxbOuCxYfa +++++ cat /tmp/tmp.2tFuEG2RBW +++++ rm /tmp/tmp.vxbOuCxYfa /tmp/tmp.2tFuEG2RBW +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ 
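# wait_cluster_consistency, condensed from the trace: poll the custom
# resource until .status.state is ready and both ready-counts match the
# expected sizes; which proxy status to read is derived from the spec,
# exactly as get_proxy_engine does here. The loop bounds come from the
# trace (i=0, max=300, sleep 7); the control flow is an assumption:
wait_cluster_consistency_sketch() {
    local cluster=$1 size=$2 proxy_size=$3 proxy
    for ((i = 0; i < 300; i++)); do
        if [[ $(kubectl get pxc "$cluster" -o 'jsonpath={.spec.haproxy.enabled}') == true ]]; then
            proxy=haproxy
        else
            proxy=proxysql
        fi
        [[ $(kubectl get pxc "$cluster" -o 'jsonpath={.status.state}') == ready ]] &&
            [[ $(kubectl get pxc "$cluster" -o 'jsonpath={.status.pxc.ready}') == "$size" ]] &&
            [[ $(kubectl get pxc "$cluster" -o "jsonpath={.status.$proxy.ready}") == "$proxy_size" ]] &&
            return 0
        sleep 7
    done
    return 1
}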
echo proxy-switch-haproxy ++++ return +++ local cluster_proxy=proxy-switch-haproxy +++ echo haproxy ++ kubectl_bin get pxc proxy-switch -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Evs8ubg8xK +++ mktemp ++ local LAST_ERR=/tmp/tmp.7dIco1Lxod ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc proxy-switch -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Evs8ubg8xK ++ cat /tmp/tmp.7dIco1Lxod ++ rm /tmp/tmp.Evs8ubg8xK /tmp/tmp.7dIco1Lxod ++ return 0 + [[ 3 == \3 ]] + echo + desc 'verify ProxySQL pods are deleted' + set +o xtrace ----------------------------------------------------------------------------------- verify ProxySQL pods are deleted ----------------------------------------------------------------------------------- + kubectl_bin get pods -l app.kubernetes.io/name=percona-xtradb-cluster,app.kubernetes.io/instance=proxy-switch,app.kubernetes.io/component=proxysql + grep -q proxysql + desc 'check cluster is ready with HAProxy' + set +o xtrace ----------------------------------------------------------------------------------- check cluster is ready with HAProxy ----------------------------------------------------------------------------------- + wait_for_running proxy-switch-pxc 3 + local name=proxy-switch-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod proxy-switch-pxc-0 480 + local pod=proxy-switch-pxc-0 + local max_retry=480 + local ns= ++ echo proxy-switch-pxc-0 ++ grep -E '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/proxy-switch-pxc-0 condition met E0516 20:09:26.544523 32509 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dproxy-switch-pxc-0&resourceVersion=1778962164839554000&timeoutSeconds=416&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/proxy-switch-pxc-0 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod proxy-switch-pxc-1 480 + local pod=proxy-switch-pxc-1 + local max_retry=480 + local ns= ++ echo proxy-switch-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/proxy-switch-pxc-1 condition met E0516 20:09:32.108610 968 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dproxy-switch-pxc-1&resourceVersion=1778962169839570000&timeoutSeconds=473&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/proxy-switch-pxc-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod proxy-switch-pxc-2 480 + local pod=proxy-switch-pxc-2 + local max_retry=480 + local ns= ++ grep -E '^(pxc|proxysql)$' ++ echo proxy-switch-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container=pxc + set +o xtrace pod/proxy-switch-pxc-2 condition met E0516 20:09:39.345917 1903 reflector.go:227] "Failed 
to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dproxy-switch-pxc-2&resourceVersion=1778962174839616000&timeoutSeconds=351&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/proxy-switch-pxc-2 to become Ready.Ok + wait_for_running proxy-switch-haproxy 3 + local name=proxy-switch-haproxy + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod proxy-switch-haproxy-0 480 + local pod=proxy-switch-haproxy-0 + local max_retry=480 + local ns= ++ grep -E '^(pxc|proxysql)$' ++ echo proxy-switch-haproxy-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/proxy-switch-haproxy-0 condition met E0516 20:09:46.327699 2961 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dproxy-switch-haproxy-0&resourceVersion=1778962184839719000&timeoutSeconds=553&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/proxy-switch-haproxy-0 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod proxy-switch-haproxy-1 480 + local pod=proxy-switch-haproxy-1 + local max_retry=480 + local ns= ++ echo proxy-switch-haproxy-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/proxy-switch-haproxy-1 condition met E0516 20:09:53.870748 4142 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dproxy-switch-haproxy-1&resourceVersion=1778962189839739000&timeoutSeconds=310&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/proxy-switch-haproxy-1 to become Ready.Ok + for i in '$(seq 0 $last_pod)' + wait_pod proxy-switch-haproxy-2 480 + local pod=proxy-switch-haproxy-2 + local max_retry=480 + local ns= ++ echo proxy-switch-haproxy-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/proxy-switch-haproxy-2 condition met E0516 20:10:00.280748 5072 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dproxy-switch-haproxy-2&resourceVersion=1778962199839896000&timeoutSeconds=394&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/proxy-switch-haproxy-2 to become Ready.Ok + wait_cluster_consistency proxy-switch 3 3 + local cluster_name=proxy-switch + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=300 + sleep 7 + echo -n 
'waiting for pxc/proxy-switch to be ready' waiting for pxc/proxy-switch to be ready++ kubectl_bin get pxc proxy-switch -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.y7VvbCnYnL +++ mktemp ++ local LAST_ERR=/tmp/tmp.P6JmuIJcVL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc proxy-switch -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.y7VvbCnYnL ++ cat /tmp/tmp.P6JmuIJcVL ++ rm /tmp/tmp.y7VvbCnYnL /tmp/tmp.P6JmuIJcVL ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc proxy-switch -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.J0twcW1pJt +++ mktemp ++ local LAST_ERR=/tmp/tmp.JM3rgQA1r3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc proxy-switch -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.J0twcW1pJt ++ cat /tmp/tmp.JM3rgQA1r3 ++ rm /tmp/tmp.J0twcW1pJt /tmp/tmp.JM3rgQA1r3 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine proxy-switch +++ local cluster_name=proxy-switch ++++ get_proxy proxy-switch ++++ local target_cluster=proxy-switch +++++ kubectl_bin get pxc proxy-switch -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.GJJpS3I7cY ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.agD3RiQtx4 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc proxy-switch -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.GJJpS3I7cY +++++ cat /tmp/tmp.agD3RiQtx4 +++++ rm /tmp/tmp.GJJpS3I7cY /tmp/tmp.agD3RiQtx4 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo proxy-switch-haproxy ++++ return +++ local cluster_proxy=proxy-switch-haproxy +++ echo haproxy ++ kubectl_bin get pxc proxy-switch -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iwahYdG4Q3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.PiZNf6is6i ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc proxy-switch -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iwahYdG4Q3 ++ cat /tmp/tmp.PiZNf6is6i ++ rm /tmp/tmp.iwahYdG4Q3 /tmp/tmp.PiZNf6is6i ++ return 0 + [[ 3 == \3 ]] + echo + desc 'verify HAProxy service exists' + set +o xtrace ----------------------------------------------------------------------------------- verify HAProxy service exists ----------------------------------------------------------------------------------- + kubectl_bin get service proxy-switch-haproxy ++ mktemp + local LAST_OUT=/tmp/tmp.CK4ULZXcQk ++ mktemp + local LAST_ERR=/tmp/tmp.w5hBn7KCMD + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get service proxy-switch-haproxy + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.CK4ULZXcQk NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE proxy-switch-haproxy ClusterIP 34.118.236.165 <none> 3306/TCP,3309/TCP,33062/TCP,33060/TCP,8404/TCP 2m29s + cat /tmp/tmp.w5hBn7KCMD + rm /tmp/tmp.CK4ULZXcQk /tmp/tmp.w5hBn7KCMD + return 0 + desc 'write data and check connectivity through HAProxy' + set +o xtrace ----------------------------------------------------------------------------------- write data and check connectivity through HAProxy ----------------------------------------------------------------------------------- + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT
EXISTS myApp (id int PRIMARY KEY);' '-h proxy-switch-haproxy -uroot -proot_password' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY);' + local 'uri=-h proxy-switch-haproxy -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8H29mXPrOj +++ mktemp ++ local LAST_ERR=/tmp/tmp.yuAKirQ5Km ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8H29mXPrOj ++ cat /tmp/tmp.yuAKirQ5Km ++ rm /tmp/tmp.8H29mXPrOj /tmp/tmp.yuAKirQ5Km ++ return 0 + client_pod=pxc-client-67fc4995bb-wq5rx + wait_pod pxc-client-67fc4995bb-wq5rx + local pod=pxc-client-67fc4995bb-wq5rx + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' ++ echo pxc-client-67fc4995bb-wq5rx + local container= + set +o xtrace pod/pxc-client-67fc4995bb-wq5rx condition met E0516 20:10:24.893029 8805 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dpxc-client-67fc4995bb-wq5rx&resourceVersion=1778962222860010000&timeoutSeconds=503&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/pxc-client-67fc4995bb-wq5rx to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100502)' '-h proxy-switch-haproxy -uroot -proot_password' + local 'command=INSERT myApp.myApp (id) VALUES (100502)' + local 'uri=-h proxy-switch-haproxy -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pe4hP9W3ck +++ mktemp ++ local LAST_ERR=/tmp/tmp.hl3fWDXNMx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pe4hP9W3ck ++ cat /tmp/tmp.hl3fWDXNMx ++ rm /tmp/tmp.pe4hP9W3ck /tmp/tmp.hl3fWDXNMx ++ return 0 + client_pod=pxc-client-67fc4995bb-wq5rx + wait_pod pxc-client-67fc4995bb-wq5rx + local pod=pxc-client-67fc4995bb-wq5rx + local max_retry=480 + local ns= ++ echo pxc-client-67fc4995bb-wq5rx ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-67fc4995bb-wq5rx condition met E0516 20:10:37.165156 10524 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dpxc-client-67fc4995bb-wq5rx&resourceVersion=1778962234288192000&timeoutSeconds=316&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/pxc-client-67fc4995bb-wq5rx to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + sleep 10 + compare_mysql_cmd select-3 'SELECT * from myApp.myApp;' '-h proxy-switch-haproxy -uroot -proot_password' + local command_id=select-3 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h 
proxy-switch-haproxy -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-3-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h proxy-switch-haproxy -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h proxy-switch-haproxy -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9FVQ7qFV1w +++ mktemp ++ local LAST_ERR=/tmp/tmp.cIEDZjSV49 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9FVQ7qFV1w ++ cat /tmp/tmp.cIEDZjSV49 ++ rm /tmp/tmp.9FVQ7qFV1w /tmp/tmp.cIEDZjSV49 ++ return 0 + client_pod=pxc-client-67fc4995bb-wq5rx + wait_pod pxc-client-67fc4995bb-wq5rx + local pod=pxc-client-67fc4995bb-wq5rx + local max_retry=480 + local ns= ++ echo pxc-client-67fc4995bb-wq5rx ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-67fc4995bb-wq5rx condition met E0516 20:10:57.716073 13675 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dpxc-client-67fc4995bb-wq5rx&resourceVersion=1778962255578525000&timeoutSeconds=597&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/pxc-client-67fc4995bb-wq5rx to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
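# The write path is re-validated after switching back: idempotent DDL, an
# INSERT with a fresh id, a replication grace period, then the select-3
# comparison through HAProxy (and, below, on every node). The sequence
# above, condensed:
run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp;
           CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY);' \
    '-h proxy-switch-haproxy -uroot -proot_password'
run_mysql 'INSERT myApp.myApp (id) VALUES (100502)' \
    '-h proxy-switch-haproxy -uroot -proot_password'
sleep 10   # give Galera time to apply the write on all members
compare_mysql_cmd select-3 'SELECT * from myApp.myApp;' \
    '-h proxy-switch-haproxy -uroot -proot_password'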
-s /tmp/tmp.he19Xv9u5M/select-3.sql ]] ++ grep 'Unknown MySQL server host' /tmp/tmp.he19Xv9u5M/select-3.sql + [[ -n '' ]] + [[ 0 -eq 0 ]] + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-3.sql /tmp/tmp.he19Xv9u5M/select-3.sql + desc 'verify data exists on all PXC nodes' + set +o xtrace ----------------------------------------------------------------------------------- verify data exists on all PXC nodes ----------------------------------------------------------------------------------- + compare_mysql_cmd select-3 'SELECT * from myApp.myApp;' '-h proxy-switch-pxc-0.proxy-switch-pxc -uroot -proot_password' + local command_id=select-3 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h proxy-switch-pxc-0.proxy-switch-pxc -uroot -proot_password' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-3-80.sql ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h proxy-switch-pxc-0.proxy-switch-pxc -uroot -proot_password' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h proxy-switch-pxc-0.proxy-switch-pxc -uroot -proot_password' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YpX9azVT7i +++ mktemp ++ local LAST_ERR=/tmp/tmp.Vvh3SdxWgG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YpX9azVT7i ++ cat /tmp/tmp.Vvh3SdxWgG ++ rm /tmp/tmp.YpX9azVT7i /tmp/tmp.Vvh3SdxWgG ++ return 0 + client_pod=pxc-client-67fc4995bb-wq5rx + wait_pod pxc-client-67fc4995bb-wq5rx + local pod=pxc-client-67fc4995bb-wq5rx + local max_retry=480 + local ns= ++ echo pxc-client-67fc4995bb-wq5rx ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ grep -E '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-67fc4995bb-wq5rx condition met E0516 20:11:07.467828 15077 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dpxc-client-67fc4995bb-wq5rx&resourceVersion=1778962265172517000&timeoutSeconds=499&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured" waiting for pod/pxc-client-67fc4995bb-wq5rx to become ReadyDefaulted container "pxc-client" out of: pxc-client, backup .Ok + set +o xtrace + [[ ! 
+ [[ ! -s /tmp/tmp.he19Xv9u5M/select-3.sql ]]
++ grep 'Unknown MySQL server host' /tmp/tmp.he19Xv9u5M/select-3.sql
+ [[ -n '' ]]
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-3.sql /tmp/tmp.he19Xv9u5M/select-3.sql
+ compare_mysql_cmd select-3 'SELECT * from myApp.myApp;' '-h proxy-switch-pxc-1.proxy-switch-pxc -uroot -proot_password'
+ local command_id=select-3
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h proxy-switch-pxc-1.proxy-switch-pxc -uroot -proot_password'
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-3.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-3-80.sql ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h proxy-switch-pxc-1.proxy-switch-pxc -uroot -proot_password'
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h proxy-switch-pxc-1.proxy-switch-pxc -uroot -proot_password'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.I2jYCoIntX
+++ mktemp
++ local LAST_ERR=/tmp/tmp.KqYRNzoOSj
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.I2jYCoIntX
++ cat /tmp/tmp.KqYRNzoOSj
++ rm /tmp/tmp.I2jYCoIntX /tmp/tmp.KqYRNzoOSj
++ return 0
+ client_pod=pxc-client-67fc4995bb-wq5rx
+ wait_pod pxc-client-67fc4995bb-wq5rx
+ local pod=pxc-client-67fc4995bb-wq5rx
+ local max_retry=480
+ local ns=
++ echo pxc-client-67fc4995bb-wq5rx
++ grep -E '^(pxc|proxysql)$'
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
+ local container=
+ set +o xtrace
pod/pxc-client-67fc4995bb-wq5rx condition met
E0516 20:11:21.001517 17112 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dpxc-client-67fc4995bb-wq5rx&resourceVersion=1778962278576773000&timeoutSeconds=344&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured"
waiting for pod/pxc-client-67fc4995bb-wq5rx to become Ready
Defaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
+ [[ ! -s /tmp/tmp.he19Xv9u5M/select-3.sql ]]
++ grep 'Unknown MySQL server host' /tmp/tmp.he19Xv9u5M/select-3.sql
+ [[ -n '' ]]
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-3.sql /tmp/tmp.he19Xv9u5M/select-3.sql
+ compare_mysql_cmd select-3 'SELECT * from myApp.myApp;' '-h proxy-switch-pxc-2.proxy-switch-pxc -uroot -proot_password'
+ local command_id=select-3
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h proxy-switch-pxc-2.proxy-switch-pxc -uroot -proot_password'
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-3.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.4 ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]]
+ [[ -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-3-80.sql ]]
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]]
+ run_mysql 'SELECT * from myApp.myApp;' '-h proxy-switch-pxc-2.proxy-switch-pxc -uroot -proot_password'
+ local 'command=SELECT * from myApp.myApp;'
+ local 'uri=-h proxy-switch-pxc-2.proxy-switch-pxc -uroot -proot_password'
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.PmqgXJuePJ
+++ mktemp
++ local LAST_ERR=/tmp/tmp.0wVH5IUIBk
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.PmqgXJuePJ
++ cat /tmp/tmp.0wVH5IUIBk
++ rm /tmp/tmp.PmqgXJuePJ /tmp/tmp.0wVH5IUIBk
++ return 0
+ client_pod=pxc-client-67fc4995bb-wq5rx
+ wait_pod pxc-client-67fc4995bb-wq5rx
+ local pod=pxc-client-67fc4995bb-wq5rx
+ local max_retry=480
+ local ns=
++ echo pxc-client-67fc4995bb-wq5rx
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ grep -E '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-67fc4995bb-wq5rx condition met
E0516 20:11:32.056445 18764 reflector.go:227] "Failed to watch" err="Get \"https://34.173.120.16/api/v1/namespaces/proxy-switch-28565/pods?allowWatchBookmarks=true&fieldSelector=metadata.name%3Dpxc-client-67fc4995bb-wq5rx&resourceVersion=1778962289417896000&timeoutSeconds=454&watch=true\": context canceled" reflector="k8s.io/client-go/tools/watch/informerwatcher.go:162" type="*unstructured.Unstructured"
waiting for pod/pxc-client-67fc4995bb-wq5rx to become Ready
Defaulted container "pxc-client" out of: pxc-client, backup
.Ok
+ set +o xtrace
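Every kubectl invocation in this log goes through the kubectl_bin wrapper, whose retry loop is fully visible in the trace: up to three attempts, stdout and stderr captured to mktemp files, break on the first zero exit status, then replay the captured output and clean up. A sketch assembled from those traced steps, not the harness source:

kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0 i
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        # The traced "'[' 0 '!=' 0 ']'" check: stop retrying once a run succeeds.
        [ "$exit_status" != 0 ] || break
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR"
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}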
+ [[ ! -s /tmp/tmp.he19Xv9u5M/select-3.sql ]]
++ grep 'Unknown MySQL server host' /tmp/tmp.he19Xv9u5M/select-3.sql
+ [[ -n '' ]]
+ [[ 0 -eq 0 ]]
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-2476/e2e-tests/proxy-switch/compare/select-3.sql /tmp/tmp.he19Xv9u5M/select-3.sql
+ desc 'clean up'
+ set +o xtrace
-----------------------------------------------------------------------------------
clean up
-----------------------------------------------------------------------------------
+ destroy proxy-switch-28565
+ local namespace=proxy-switch-28565
+ local ignore_logs=true
+ [[ 0 == 1 ]]
+ desc 'destroy cluster/operator and all other resources'
+ set +o xtrace
-----------------------------------------------------------------------------------
destroy cluster/operator and all other resources
-----------------------------------------------------------------------------------
+ '[' true == false -o 1 == 1 ']'
+ tee /tmp/tmp.he19Xv9u5M/operator.log
+ /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g'
+ grep -v 'the object has been modified'
+ sort -u
+ grep -v 'get backup status: Job.batch'
+ grep -v level=info
++ get_operator_pod
++ local label_prefix=app.kubernetes.io/
+++ grep -c percona-xtradb-cluster-operator
+++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator
++ local check_label=1
++ [[ 1 -eq 0 ]]
++ jq -r '.items[] | select(.metadata.deletionTimestamp == null) | .metadata.name'
++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator
+++ mktemp
++ head -1
++ local LAST_OUT=/tmp/tmp.0luDCXNots
+++ mktemp
++ local LAST_ERR=/tmp/tmp.rCg4XdfDgp
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator --field-selector=status.phase=Running -o json -n pxc-operator
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.0luDCXNots
++ cat /tmp/tmp.rCg4XdfDgp
++ rm /tmp/tmp.0luDCXNots /tmp/tmp.rCg4XdfDgp
++ return 0
+ kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-8548fd5788-nkq4t
++ mktemp
+ local LAST_OUT=/tmp/tmp.6ybs6gyWVo
++ mktemp
+ local LAST_ERR=/tmp/tmp.Vhb0zYHKXw
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl logs -n pxc-operator percona-xtradb-cluster-operator-8548fd5788-nkq4t
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.6ybs6gyWVo
+ cat /tmp/tmp.Vhb0zYHKXw
+ rm /tmp/tmp.6ybs6gyWVo /tmp/tmp.Vhb0zYHKXw
+ return 0
2026-05-16T19:54:42.510Z INFO setup Feature gates {"PXCO_FEATURE_GATES": "", "enabled": ""}
2026-05-16T19:54:42.510Z INFO setup Manager starting up {"gitCommit": "a8b01a395609d7c1bde5b4299a9d804bfdd7ae3d", "gitBranch": "PR-2476-a8b01a39", "buildTime": "2026-05-16T17:59:58Z", "goVersion": "go1.26.3", "os": "linux", "arch": "amd64"}
2026-05-16T19:54:42.510Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.33.11-gke.1137000"}
2026-05-16T19:54:42.513Z INFO setup Registering Components.
2026-05-16T19:54:43.208Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"}
2026-05-16T19:54:43.208Z INFO setup Starting the Cmd.
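Collecting the operator log here amounts to: resolve the running operator pod by label, dump its logs, and normalize them (strip timestamps, drop known-noisy messages, de-duplicate) before saving. A sketch reconstructed from the traced pipeline; the sed expression and grep filters are copied from the trace verbatim:

get_operator_pod() {
    kubectl_bin get pods \
        --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator \
        --field-selector=status.phase=Running -o json -n pxc-operator \
        | jq -r '.items[] | select(.metadata.deletionTimestamp == null) | .metadata.name' \
        | head -1
}

kubectl_bin logs -n pxc-operator "$(get_operator_pod)" \
    | /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' \
    | grep -v 'the object has been modified' \
    | grep -v 'get backup status: Job.batch' \
    | grep -v level=info \
    | sort -u \
    | tee /tmp/tmp.he19Xv9u5M/operator.log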
2026-05-16T19:54:43.209Z INFO controller-runtime.certwatcher Starting certificate poll+watcher {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key", "interval": "10s"}
2026-05-16T19:54:43.209Z INFO controller-runtime.certwatcher Updated current TLS certificate {"cert": "/tmp/k8s-webhook-server/serving-certs/tls.crt", "key": "/tmp/k8s-webhook-server/serving-certs/tls.key"}
2026-05-16T19:54:43.209Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false}
2026-05-16T19:54:43.209Z INFO controller-runtime.metrics Starting metrics server
2026-05-16T19:54:43.209Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443}
2026-05-16T19:54:43.209Z INFO controller-runtime.webhook Starting webhook server
2026-05-16T19:54:43.209Z INFO starting server {"name": "health probe", "addr": "[::]:8081"}
2026-05-16T19:54:43.309Z INFO Attempting to acquire leader lease... {"lock": "pxc-operator/08db1feb.percona.com"}
2026-05-16T19:54:43.332Z DEBUG events percona-xtradb-cluster-operator-8548fd5788-nkq4t_a052a61e-331e-4a97-a2c8-2bce980296a8 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"50107d6d-c099-49eb-8678-9ae42daa1d4f","apiVersion":"coordination.k8s.io/v1","resourceVersion":"1778961283327279009"}, "reason": "LeaderElection"}
2026-05-16T19:54:43.332Z INFO Starting EventSource {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup", "source": "kind source: *v1.PerconaXtraDBClusterBackup"}
2026-05-16T19:54:43.332Z INFO Starting EventSource {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "source": "kind source: *v1.PerconaXtraDBCluster"}
2026-05-16T19:54:43.332Z INFO Starting EventSource {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "source": "kind source: *v1.Secret"}
2026-05-16T19:54:43.332Z INFO Starting EventSource {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore", "source": "kind source: *v1.PerconaXtraDBClusterRestore"}
2026-05-16T19:54:43.332Z INFO Successfully acquired lease {"lock": "pxc-operator/08db1feb.percona.com"}
2026-05-16T19:54:43.533Z INFO Starting Controller {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore"}
2026-05-16T19:54:43.534Z INFO Starting Controller {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup"}
2026-05-16T19:54:43.534Z INFO Starting Controller {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster"}
2026-05-16T19:54:43.534Z INFO Starting workers {"controller": "pxcbackup-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterBackup", "worker count": 1}
2026-05-16T19:54:43.534Z INFO Starting workers {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "worker count": 1}
2026-05-16T19:54:43.534Z INFO Starting workers {"controller": "pxcrestore-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBClusterRestore", "worker count": 1}
"pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "7548fedf-36d9-4d42-bc57-9f408a8a178b", "version": "1.20.0"} 2026-05-16T19:55:56.497Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "7548fedf-36d9-4d42-bc57-9f408a8a178b", "object": "auto-proxy-switch-pxc", "kind": "&TypeMeta{Kind:ConfigMap,APIVersion:v1,}"} 2026-05-16T19:55:56.615Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "7548fedf-36d9-4d42-bc57-9f408a8a178b", "object": "proxy-switch-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2026-05-16T19:55:56.678Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "7548fedf-36d9-4d42-bc57-9f408a8a178b", "object": "proxy-switch-pxc", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2026-05-16T19:55:56.754Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "7548fedf-36d9-4d42-bc57-9f408a8a178b", "object": "proxy-switch-pxc", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-05-16T19:55:56.826Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "7548fedf-36d9-4d42-bc57-9f408a8a178b", "object": "proxy-switch-pxc-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-05-16T19:55:56.911Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "7548fedf-36d9-4d42-bc57-9f408a8a178b", "object": "proxy-switch-haproxy", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-05-16T19:55:57.044Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "7548fedf-36d9-4d42-bc57-9f408a8a178b", "object": "proxy-switch-haproxy-replicas", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-05-16T19:55:57.938Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", 
"controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "532d8710-eb01-450f-b407-aff3927cc563", "object": "proxy-switch-pxc", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2026-05-16T19:55:57.959Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "532d8710-eb01-450f-b407-aff3927cc563", "object": "proxy-switch-haproxy", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2026-05-16T19:57:29.467Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "2028ad2b-6e9f-4dbe-8024-40acdde3b5a9", "user": "operator"} 2026-05-16T19:57:29.497Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "2028ad2b-6e9f-4dbe-8024-40acdde3b5a9", "user": "monitor"} 2026-05-16T19:57:29.553Z INFO User monitor: granted privileges {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "2028ad2b-6e9f-4dbe-8024-40acdde3b5a9"} 2026-05-16T19:57:29.580Z INFO monitor user privileges granted {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "2028ad2b-6e9f-4dbe-8024-40acdde3b5a9"} 2026-05-16T19:57:29.619Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "2028ad2b-6e9f-4dbe-8024-40acdde3b5a9", "user": "xtrabackup"} 2026-05-16T19:57:29.660Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "2028ad2b-6e9f-4dbe-8024-40acdde3b5a9"} 2026-05-16T19:57:29.690Z INFO Password expiration policy updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "2028ad2b-6e9f-4dbe-8024-40acdde3b5a9", "user": "replication"} 2026-05-16T20:00:07.172Z INFO Password expiration policy 
updated {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "05477c7e-237a-4e4d-b828-5fec3b14d158", "user": "root"} 2026-05-16T20:00:07.271Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "05477c7e-237a-4e4d-b828-5fec3b14d158", "new version": "8.0.43-34.1"} 2026-05-16T20:03:35.377Z INFO controller-runtime.cache Warning: watch ended with error {"reflector": "pkg/mod/k8s.io/client-go@v0.36.1/tools/cache/reflector.go:343", "type": "*v1.Event", "err": "very short watch: pkg/mod/k8s.io/client-go@v0.36.1/tools/cache/reflector.go:343: Unexpected watch close - watch lasted less than a second and no items received"} 2026-05-16T20:05:02.519Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "c18ecd93-b46d-46d0-8336-01ec81952ced", "object": "proxy-switch-proxysql", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2026-05-16T20:05:02.893Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "c18ecd93-b46d-46d0-8336-01ec81952ced", "object": "proxy-switch-proxysql", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-05-16T20:05:03.333Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "c18ecd93-b46d-46d0-8336-01ec81952ced", "object": "proxy-switch-proxysql-unready", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-05-16T20:05:06.684Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "c18ecd93-b46d-46d0-8336-01ec81952ced", "err": "get primary pxc pod: failed to get proxy connection: dial tcp: lookup proxy-switch-proxysql-unready.proxy-switch-28565 on 34.118.224.10:53: no such host"} 2026-05-16T20:05:07.424Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "68d606e5-815d-4bc1-8a76-6ee4e10b5d49", "object": "proxy-switch-proxysql", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2026-05-16T20:05:11.100Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", 
"controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "68d606e5-815d-4bc1-8a76-6ee4e10b5d49", "err": "get primary pxc pod: failed to get proxy connection: dial tcp: lookup proxy-switch-proxysql-unready.proxy-switch-28565 on 34.118.224.10:53: no such host"} 2026-05-16T20:05:19.331Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "ee41aa0e-7978-4227-9ef8-e972d21ca3a3", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 10.160.73.8:6032: connect: connection refused"} 2026-05-16T20:05:24.507Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "46ef7de0-cfa5-4e55-a66f-965ef2dfc285", "err": "get primary pxc pod: not found"} 2026-05-16T20:05:24.715Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "4d5924a5-3041-4ac7-9c04-8ae0d174ab1d", "err": "get primary pxc pod: not found"} 2026-05-16T20:05:30.379Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "577acfa5-ba61-4279-aec9-85d46a3772f9", "err": "get primary pxc pod: not found"} 2026-05-16T20:05:35.743Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "11486acf-1f77-4498-8dc6-015e30bf188e", "err": "get primary pxc pod: not found"} 2026-05-16T20:06:15.258Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "df09f9fe-23d0-4288-9789-615fde8e4116", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / 
2026-05-16T20:06:15.258Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "df09f9fe-23d0-4288-9789-615fde8e4116", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:993\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:837\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1771"}
2026-05-16T20:06:26.195Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "89f38bd8-1cf7-4430-814f-d8242fe69571", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:993\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:837\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1771"}
2026-05-16T20:06:36.390Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "ff7eb09a-464d-47c1-b18b-53d3720e0f74", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:993\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:837\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1771"}
2026-05-16T20:06:47.562Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "693baacb-13a3-4378-a32f-5ed5270333e2", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:993\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:837\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1771"}
2026-05-16T20:06:58.147Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "12b3121e-9801-416c-932a-a62f812680d3", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:993\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:837\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1771"}
2026-05-16T20:07:08.589Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "2eb2dff2-a43c-4e6c-8866-373bf8161cb5", "error": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: failed to execute command in pod: command terminated with exit code 1 / / ERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\nERROR (line:515) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:993\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:837\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1771"}
2026-05-16T20:07:16.144Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "6edf9e11-607b-4c5a-9540-bb5e4e9ef56c"}
2026-05-16T20:07:21.866Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "c7bc84ba-be6c-44d8-a804-4e2b3bfcdb3c"}
2026-05-16T20:07:27.062Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "8f703e07-e219-447c-9c1a-db81f97a3ef0"}
2026-05-16T20:07:32.142Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "1df7b41d-b43b-4a0e-be4d-d2f0e44c9090"}
2026-05-16T20:07:38.069Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "8036eb49-8ea4-40d7-a834-4bce69b85c9e"}
2026-05-16T20:07:42.794Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "0dfced5b-25f3-4c22-a187-5b9e6c516130"}
2026-05-16T20:07:48.091Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "b496a5c8-ffb3-49ee-b180-7520e0484a05"}
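The repeated 'writer hostgroup:11 has not been configured in ProxySQL' errors come from syncusers running before the Galera hostgroups have been written to ProxySQL; once they exist, the 'PXC users synced' entries that follow show the sync succeeding. A hypothetical way to inspect that state on the ProxySQL admin interface; the container name, admin credentials, and use of port 6032 are assumptions, not values from this test:

# Hypothetical inspection of ProxySQL's Galera hostgroup config.
kubectl exec -n proxy-switch-28565 proxy-switch-proxysql-0 -c proxysql -- \
    mysql -h127.0.0.1 -P6032 -uadmin -padmin_password \
    -e 'SELECT writer_hostgroup, reader_hostgroup, active FROM mysql_galera_hostgroups;'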
"&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2026-05-16T20:07:52.670Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "64a6220a-f83c-47d5-923a-015737202d7c", "object": "proxy-switch-haproxy", "kind": "&TypeMeta{Kind:StatefulSet,APIVersion:apps/v1,}"} 2026-05-16T20:07:52.726Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "64a6220a-f83c-47d5-923a-015737202d7c", "object": "proxy-switch-haproxy", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-05-16T20:07:52.864Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "64a6220a-f83c-47d5-923a-015737202d7c", "object": "proxy-switch-haproxy-replicas", "kind": "&TypeMeta{Kind:Service,APIVersion:v1,}"} 2026-05-16T20:07:54.072Z ERROR sync users {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "a4820870-6c32-447e-9acb-d30a77a1038d", "error": "exec syncusers: failed to execute command in pod: unable to upgrade connection: pods \"proxy-switch-proxysql-2\" not found / / ", "errorVerbose": "exec syncusers: failed to execute command in pod: unable to upgrade connection: pods \"proxy-switch-proxysql-2\" not found / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:993\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:837\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1771"} 2026-05-16T20:07:56.434Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "64a6220a-f83c-47d5-923a-015737202d7c", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 34.118.236.165:3306: connect: connection refused"} 2026-05-16T20:07:57.120Z DEBUG Creating object {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "4686a5b7-0d43-4251-b9ef-26e7be6f9ee3", "object": "proxy-switch-haproxy", "kind": "&TypeMeta{Kind:PodDisruptionBudget,APIVersion:policy/v1,}"} 2026-05-16T20:08:00.149Z INFO reconcile replication error 
{"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "4686a5b7-0d43-4251-b9ef-26e7be6f9ee3", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 34.118.236.165:3306: connect: connection refused"} 2026-05-16T20:08:08.639Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "c0eca718-ac30-4c4d-93bf-410c279a8f8a", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 34.118.236.165:3306: connect: connection refused"} 2026-05-16T20:08:16.826Z INFO reconcile replication error {"controller": "pxc-controller", "controllerGroup": "pxc.percona.com", "controllerKind": "PerconaXtraDBCluster", "PerconaXtraDBCluster": {"name":"proxy-switch","namespace":"proxy-switch-28565"}, "namespace": "proxy-switch-28565", "name": "proxy-switch", "reconcileID": "f0254ed8-2a9d-4254-b3f3-7ecec33a0f88", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 34.118.236.165:3306: connect: connection refused"} github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:839 + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + kubectl patch pxc -n proxy-switch-28565 proxy-switch --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/proxy-switch patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.zkgD9mUxYj ++ mktemp + local LAST_ERR=/tmp/tmp.u7DH2nKtHe + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.zkgD9mUxYj perconaxtradbcluster.pxc.percona.com "proxy-switch" deleted from proxy-switch-28565 namespace + cat /tmp/tmp.u7DH2nKtHe + rm /tmp/tmp.zkgD9mUxYj /tmp/tmp.u7DH2nKtHe + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.m2PHcSXtDL ++ mktemp + local LAST_ERR=/tmp/tmp.IqBOHrkHwb + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.m2PHcSXtDL No resources found + cat /tmp/tmp.IqBOHrkHwb + rm /tmp/tmp.m2PHcSXtDL /tmp/tmp.IqBOHrkHwb + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.UgWaO6r4K4 ++ mktemp + local LAST_ERR=/tmp/tmp.tFozZM22yr + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.UgWaO6r4K4 No resources found + cat /tmp/tmp.tFozZM22yr + rm /tmp/tmp.UgWaO6r4K4 /tmp/tmp.tFozZM22yr + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.WZllSf78mD ++ mktemp + local LAST_ERR=/tmp/tmp.TdypXr49Om + local 
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.WZllSf78mD
validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted
+ cat /tmp/tmp.TdypXr49Om
+ rm /tmp/tmp.WZllSf78mD /tmp/tmp.TdypXr49Om
+ return 0
+ kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.19.2/cert-manager.yaml
+ :
+ '[' '!' -z '' ']'
+ '[' -n pxc-operator ']'
+ rm -rf /tmp/tmp.he19Xv9u5M
+ kubectl_bin delete --grace-period=0 --force=true namespace proxy-switch-28565
++ mktemp
+ kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator
+ local LAST_OUT=/tmp/tmp.3RJcDuJGAJ
++ mktemp
++ mktemp
+ local LAST_OUT=/tmp/tmp.12kf3kwBHU
+ local LAST_ERR=/tmp/tmp.nD9h3w0bQn
+ local exit_status=0
++ mktemp
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace proxy-switch-28565
+ local LAST_ERR=/tmp/tmp.pcbXJUt48Y
+ local exit_status=0
+ desc 'test passed'
+ set +o xtrace
-----------------------------------------------------------------------------------
test passed
-----------------------------------------------------------------------------------
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace pxc-operator
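The final teardown removes both test namespaces with forced, zero-grace-period deletes; judging by the interleaved mktemp and trace lines above, the two kubectl_bin calls run concurrently. Roughly, under that assumption:

# Sketch of the traced teardown; the backgrounding is inferred from the
# interleaved trace output, not shown explicitly in it.
kubectl delete --grace-period=0 --force=true namespace proxy-switch-28565 &
kubectl delete --grace-period=0 --force=true namespace pxc-operator &
wait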