Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/logs/users-5-7.log WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 + create_infra users-11047 + local ns=users-11047 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-3921 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.4RxHzuzxbk ++ mktemp + local LAST_ERR=/tmp/tmp.PCrUs3cwgj + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.4RxHzuzxbk perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.PCrUs3cwgj + rm /tmp/tmp.4RxHzuzxbk /tmp/tmp.PCrUs3cwgj + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.rV5Yj6mlSC ++ mktemp + local LAST_ERR=/tmp/tmp.C4QELKsGbK + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.rV5Yj6mlSC No resources found + cat /tmp/tmp.C4QELKsGbK + rm /tmp/tmp.rV5Yj6mlSC /tmp/tmp.C4QELKsGbK + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.tGFEKIXgSS ++ mktemp + local LAST_ERR=/tmp/tmp.EgXMW4Wi3L + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.tGFEKIXgSS No resources found + cat /tmp/tmp.EgXMW4Wi3L + rm /tmp/tmp.tGFEKIXgSS /tmp/tmp.EgXMW4Wi3L + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ grep chaos-mesh.org ++ kubectl get crd ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified 
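Editor's note: the trace above is create_infra tearing down leftovers from a previous run, and two patterns in it are worth calling out. First, PerconaXtraDBCluster custom resources carry finalizers, so a plain delete would hang until the operator processed them; the suite empties the finalizer list with a merge patch before deleting. Second, the chaos-mesh teardown is best-effort: when grep finds nothing, kubectl delete is invoked with no resource names, fails with "error: resource(s) were provided, but no name was specified", and the failure is swallowed with the no-op `:`. A minimal sketch of both patterns, restated from the commands in this log:

# Clear finalizers on every pxc resource so deletion cannot block on the operator.
kubectl get pxc --all-namespaces -o wide | grep -v NAMESPACE \
    | xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
kubectl delete pxc --all --all-namespaces

# Best-effort chaos-mesh cleanup: an empty name list makes kubectl exit non-zero,
# which `|| :` turns into a no-op instead of aborting the script under `set -e`.
timeout 30 kubectl delete MutatingWebhookConfiguration \
    $(kubectl get MutatingWebhookConfiguration | grep chaos-mesh | awk '{print $1}') || :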
+ : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator + xargs kubectl delete ns ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.pN2S8Lrz47 ++ mktemp + local LAST_OUT=/tmp/tmp.VyMdZbFFJZ + awk '{print$1}' ++ mktemp + local LAST_ERR=/tmp/tmp.0IiFO9Ngoh + local exit_status=0 + local LAST_ERR=/tmp/tmp.XbQyrTX6IE + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.VyMdZbFFJZ + cat /tmp/tmp.XbQyrTX6IE + rm /tmp/tmp.VyMdZbFFJZ /tmp/tmp.XbQyrTX6IE + return 0 namespace "users-3921" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.pN2S8Lrz47 namespace "pxc-operator" deleted + cat /tmp/tmp.0IiFO9Ngoh + rm /tmp/tmp.pN2S8Lrz47 /tmp/tmp.0IiFO9Ngoh + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'namespace/pxc-operator - ' namespace/pxc-operator - + set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.sJ7Rk1HKR4 ++ mktemp + local LAST_ERR=/tmp/tmp.JarBhLhoBe + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.sJ7Rk1HKR4 namespace/pxc-operator created + cat /tmp/tmp.JarBhLhoBe + rm /tmp/tmp.sJ7Rk1HKR4 /tmp/tmp.JarBhLhoBe + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.1R6pIZy0Be +++ mktemp ++ local LAST_ERR=/tmp/tmp.LGVFcPij9z ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1R6pIZy0Be ++ cat /tmp/tmp.LGVFcPij9z ++ rm /tmp/tmp.1R6pIZy0Be /tmp/tmp.LGVFcPij9z ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1735-1d5766ea-4-cluster9 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.12N82ISR8t ++ mktemp + local LAST_ERR=/tmp/tmp.SDCiTnltaR + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1735-1d5766ea-4-cluster9 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.12N82ISR8t Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1735-1d5766ea-4-cluster9" modified. 
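Editor's note: nearly every command in this log runs through a kubectl_bin wrapper, visible in the trace as the recurring mktemp / `seq 0 2` / `set +e` / `cat` / `rm` sequence: stdout and stderr are captured to temp files, the call is retried up to three times, and on completion both files are replayed into the log and the last exit status is returned. The helper's real definition lives in the suite's functions library, which is not part of this log, so the following is an approximation reconstructed from the trace:

# Approximate reconstruction of kubectl_bin as implied by this trace.
kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do                  # up to 3 attempts, as in the trace
        set +e                               # tolerate a failing attempt under set -e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        if [ "$exit_status" == 0 ]; then
            break                            # success: stop retrying
        fi
        sleep 0                              # the trace shows an immediate retry
    done
    cat "$LAST_OUT"                          # replay captured output into the log
    cat "$LAST_ERR"
    rm "$LAST_OUT" "$LAST_ERR"
    return $exit_status
}

When all three attempts fail, the non-zero status propagates to the caller, which is why later in this log a failed `kubectl delete namespace users-11047` ends with `return 1` followed by the caller's tolerant `+ :`.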
+ cat /tmp/tmp.SDCiTnltaR + rm /tmp/tmp.12N82ISR8t /tmp/tmp.SDCiTnltaR + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.BgAlEVtNvn ++ mktemp + local LAST_ERR=/tmp/tmp.KoV7GDJ35G + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BgAlEVtNvn customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.KoV7GDJ35G + rm /tmp/tmp.BgAlEVtNvn /tmp/tmp.KoV7GDJ35G + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/deploy/cw-rbac.yaml + kubectl_bin apply -f - + sed -e 's^namespace: .*^namespace: pxc-operator^' ++ mktemp + local LAST_OUT=/tmp/tmp.M9wPB6mCdH ++ mktemp + local LAST_ERR=/tmp/tmp.gmrG935gTa + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.M9wPB6mCdH clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.gmrG935gTa + rm /tmp/tmp.M9wPB6mCdH /tmp/tmp.gmrG935gTa + return 0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/deploy/cw-operator.yaml + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-1735-1d5766ea^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - + kubectl_bin apply -f - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - ++ mktemp + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + local LAST_OUT=/tmp/tmp.f0siQ28FN7 ++ mktemp + local LAST_ERR=/tmp/tmp.8EZkwV9SrL + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.f0siQ28FN7 deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.8EZkwV9SrL + rm /tmp/tmp.f0siQ28FN7 /tmp/tmp.8EZkwV9SrL + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.qYMjPanpi0 ++ mktemp + local LAST_ERR=/tmp/tmp.EC4msTJJp6 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.qYMjPanpi0 pod/percona-xtradb-cluster-operator-74f48b466f-86l2g condition met + cat /tmp/tmp.EC4msTJJp6 + rm /tmp/tmp.qYMjPanpi0 /tmp/tmp.EC4msTJJp6 + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.bItAWzwJHq +++ mktemp ++ local LAST_ERR=/tmp/tmp.VYRRBEtNJo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bItAWzwJHq ++ cat /tmp/tmp.VYRRBEtNJo ++ rm /tmp/tmp.bItAWzwJHq /tmp/tmp.VYRRBEtNJo ++ return 0 + wait_pod percona-xtradb-cluster-operator-74f48b466f-86l2g 480 pxc-operator + local pod=percona-xtradb-cluster-operator-74f48b466f-86l2g + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-74f48b466f-86l2g ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-74f48b466f-86l2g condition met percona-xtradb-cluster-operator-74f48b466f-86l2g.Ok + sleep 3 + create_namespace users-11047 + local namespace=users-11047 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v 
'^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces users-11047' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-11047 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-11047 + kubectl_bin get ns ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.jegIzYQnTG + local LAST_OUT=/tmp/tmp.Vj4O3o9VNB ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.zjrBshS0ye + local exit_status=0 + local LAST_ERR=/tmp/tmp.g6yKqSpDk4 + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-11047 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-11047 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jegIzYQnTG + cat /tmp/tmp.zjrBshS0ye + rm /tmp/tmp.jegIzYQnTG /tmp/tmp.zjrBshS0ye + return 0 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-11047 Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.Vj4O3o9VNB + cat /tmp/tmp.g6yKqSpDk4 Error from server (NotFound): namespaces "users-11047" not found + rm /tmp/tmp.Vj4O3o9VNB /tmp/tmp.g6yKqSpDk4 + return 1 + : + wait_for_delete namespace/users-11047 + local res=namespace/users-11047 + echo -n 'namespace/users-11047 - ' namespace/users-11047 - + set +o xtrace Error from server (NotFound): namespaces "users-11047" not found + desc 'create namespace users-11047' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-11047 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-11047 ++ mktemp + local LAST_OUT=/tmp/tmp.Eufhq1adW3 ++ mktemp + local LAST_ERR=/tmp/tmp.THaCN0WryH + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-11047 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Eufhq1adW3 namespace/users-11047 created + cat /tmp/tmp.THaCN0WryH + rm /tmp/tmp.Eufhq1adW3 /tmp/tmp.THaCN0WryH + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.WqFRTiVeOD +++ mktemp ++ local LAST_ERR=/tmp/tmp.t75MLjxdgt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WqFRTiVeOD ++ cat /tmp/tmp.t75MLjxdgt ++ rm /tmp/tmp.WqFRTiVeOD /tmp/tmp.t75MLjxdgt ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1735-1d5766ea-4-cluster9 --namespace=users-11047 ++ mktemp + local LAST_OUT=/tmp/tmp.BHOSRKEMSU ++ mktemp + local LAST_ERR=/tmp/tmp.ENX3tyHb0Z + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1735-1d5766ea-4-cluster9 --namespace=users-11047 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BHOSRKEMSU Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1735-1d5766ea-4-cluster9" modified. 
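Editor's note: the namespace turnover just traced (delete users-11047 if present, wait for it to disappear, recreate it, then point the current kube context at it) is the per-test isolation step. Condensed below; the polling loop is my simplification of wait_for_delete, whose body is not shown in the trace beyond the NotFound probe:

ns=users-11047
kubectl delete namespace "$ns" || :                    # tolerate NotFound on a fresh cluster
while kubectl get namespace "$ns" >/dev/null 2>&1; do  # wait_for_delete, simplified
    sleep 1
done
kubectl create namespace "$ns"
kubectl config set-context "$(kubectl config current-context)" --namespace="$ns"

The remainder of the log is the users test proper: it rotates the root, proxyadmin, and xtrabackup passwords by patching my-cluster-secrets with a new base64-encoded value, waits for the operator to reconcile, and verifies access with the new password. The recurring pattern, condensed (secret name, key, and password are taken from the trace; the readiness loop is a simplified wait_cluster_consistency):

newpass=test-password
value=$(echo -n "$newpass" | base64)                   # dGVzdC1wYXNzd29yZA==
kubectl patch secret my-cluster-secrets -p="{\"data\":{\"root\": \"$value\"}}"
sleep 15
until [[ $(kubectl get pxc some-name -o 'jsonpath={.status.state}') == "ready" ]]; do
    echo 'waiting for cluster readyness'               # message spelled as in the suite
    sleep 20
done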
+ cat /tmp/tmp.ENX3tyHb0Z + rm /tmp/tmp.BHOSRKEMSU /tmp/tmp.ENX3tyHb0Z + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.L5O3h8wAvN ++ mktemp + local LAST_ERR=/tmp/tmp.079U4QPIht + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.L5O3h8wAvN secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.079U4QPIht + rm /tmp/tmp.L5O3h8wAvN /tmp/tmp.079U4QPIht + return 0 + desc 'create PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.HUvvGmCudG ++ mktemp + local LAST_ERR=/tmp/tmp.x44nQMmhQ7 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.HUvvGmCudG secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.x44nQMmhQ7 + rm /tmp/tmp.HUvvGmCudG /tmp/tmp.x44nQMmhQ7 + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/conf/client.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-11047~ + local LAST_OUT=/tmp/tmp.SaptFkE4Iu + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 
's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + local LAST_ERR=/tmp/tmp.nbVTlZGqP4 + local exit_status=0 + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1735-1d5766ea#' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.SaptFkE4Iu deployment.apps/pxc-client created + cat /tmp/tmp.nbVTlZGqP4 + rm /tmp/tmp.SaptFkE4Iu /tmp/tmp.nbVTlZGqP4 + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p\-\c\l\o\u\d ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1735-1d5766ea#' + local LAST_OUT=/tmp/tmp.QXjnOPcJ9w + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' ++ mktemp + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-11047~ + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + local LAST_ERR=/tmp/tmp.5ZeEe0kg2i + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.QXjnOPcJ9w perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.5ZeEe0kg2i + rm /tmp/tmp.QXjnOPcJ9w /tmp/tmp.5ZeEe0kg2i + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.MIaAeGJvoV ++++ mktemp +++ local LAST_ERR=/tmp/tmp.xQzCoFjACC +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ 
set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.MIaAeGJvoV +++ cat /tmp/tmp.xQzCoFjACC +++ rm /tmp/tmp.MIaAeGJvoV /tmp/tmp.xQzCoFjACC +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.Hcc7RmEgsU ++++ mktemp +++ local LAST_ERR=/tmp/tmp.dZgfETMaPj +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.Hcc7RmEgsU +++ cat /tmp/tmp.dZgfETMaPj +++ rm /tmp/tmp.Hcc7RmEgsU /tmp/tmp.dZgfETMaPj +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-11047 ++ mktemp + local LAST_OUT=/tmp/tmp.HmsUwYt4ni ++ mktemp + local LAST_ERR=/tmp/tmp.0Ow87900X0 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-11047 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-11047 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-11047 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.HmsUwYt4ni + cat /tmp/tmp.0Ow87900X0 error: no matching resources found + rm /tmp/tmp.HmsUwYt4ni /tmp/tmp.0Ow87900X0 + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met some-name-proxysql-0.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met some-name-pxc-0.Ok + for i in 
'$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met some-name-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met some-name-pxc-2.Ok + sleep 10 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KHYdqQQb2p +++ mktemp ++ local LAST_ERR=/tmp/tmp.4D24ealMd6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KHYdqQQb2p ++ cat /tmp/tmp.4D24ealMd6 Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.KHYdqQQb2p /tmp/tmp.4D24ealMd6 ++ return 0 + [[ -n '' ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wddzHLPt2S +++ mktemp ++ local LAST_ERR=/tmp/tmp.8ZMFlJgjGE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wddzHLPt2S ++ cat /tmp/tmp.8ZMFlJgjGE ++ rm /tmp/tmp.wddzHLPt2S /tmp/tmp.8ZMFlJgjGE ++ return 0 + client_pod=pxc-client-64b479df95-82fl6 + wait_pod pxc-client-64b479df95-82fl6 + local pod=pxc-client-64b479df95-82fl6 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-82fl6 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-82fl6 condition met pxc-client-64b479df95-82fl6.Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5ij0pffFBx +++ mktemp ++ local LAST_ERR=/tmp/tmp.mv11jmu8bi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ 
break ++ cat /tmp/tmp.5ij0pffFBx ++ cat /tmp/tmp.mv11jmu8bi ++ rm /tmp/tmp.5ij0pffFBx /tmp/tmp.mv11jmu8bi ++ return 0 + client_pod=pxc-client-64b479df95-82fl6 + wait_pod pxc-client-64b479df95-82fl6 + local pod=pxc-client-64b479df95-82fl6 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-82fl6 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-82fl6 condition met pxc-client-64b479df95-82fl6.Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.auO0Nrm8KZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.KRYZt7tzN2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.auO0Nrm8KZ ++ cat /tmp/tmp.KRYZt7tzN2 ++ rm /tmp/tmp.auO0Nrm8KZ /tmp/tmp.KRYZt7tzN2 ++ return 0 + client_pod=pxc-client-64b479df95-82fl6 + wait_pod pxc-client-64b479df95-82fl6 + local pod=pxc-client-64b479df95-82fl6 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-82fl6 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-82fl6 condition met pxc-client-64b479df95-82fl6.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.k32Y9mXGJH/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/compare/select-1.sql /tmp/tmp.k32Y9mXGJH/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GMjGL6c5y1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.p8eMYwal9K ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GMjGL6c5y1 ++ cat /tmp/tmp.p8eMYwal9K ++ rm /tmp/tmp.GMjGL6c5y1 /tmp/tmp.p8eMYwal9K ++ return 0 + client_pod=pxc-client-64b479df95-82fl6 + wait_pod pxc-client-64b479df95-82fl6 + local pod=pxc-client-64b479df95-82fl6 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-82fl6 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-82fl6 condition met pxc-client-64b479df95-82fl6.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.k32Y9mXGJH/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/compare/select-1.sql /tmp/tmp.k32Y9mXGJH/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PuEf9xiR5X +++ mktemp ++ local LAST_ERR=/tmp/tmp.05gE91jZ3y ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PuEf9xiR5X ++ cat /tmp/tmp.05gE91jZ3y ++ rm /tmp/tmp.PuEf9xiR5X /tmp/tmp.05gE91jZ3y ++ return 0 + client_pod=pxc-client-64b479df95-82fl6 + wait_pod pxc-client-64b479df95-82fl6 + local pod=pxc-client-64b479df95-82fl6 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-82fl6 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-82fl6 condition met pxc-client-64b479df95-82fl6.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.k32Y9mXGJH/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/compare/select-1.sql /tmp/tmp.k32Y9mXGJH/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ELsdTkAHUs +++ mktemp ++ local LAST_ERR=/tmp/tmp.4kIKPSxjYL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ELsdTkAHUs ++ cat /tmp/tmp.4kIKPSxjYL Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.ELsdTkAHUs /tmp/tmp.4kIKPSxjYL ++ return 0 + '[' '' ']' + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.DvWm7sZEuZ ++ mktemp + local LAST_ERR=/tmp/tmp.35rLCm89C1 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.DvWm7sZEuZ secret/my-cluster-secrets patched + cat /tmp/tmp.35rLCm89C1 + rm /tmp/tmp.DvWm7sZEuZ /tmp/tmp.35rLCm89C1 + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LywIcUNGU8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.pGQ7Ph48qB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LywIcUNGU8 ++ cat /tmp/tmp.pGQ7Ph48qB ++ rm /tmp/tmp.LywIcUNGU8 /tmp/tmp.pGQ7Ph48qB ++ return 0 + client_pod=pxc-client-64b479df95-82fl6 + wait_pod pxc-client-64b479df95-82fl6 + local pod=pxc-client-64b479df95-82fl6 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-82fl6 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-82fl6 condition met pxc-client-64b479df95-82fl6.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.k32Y9mXGJH/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/compare/select-4.sql /tmp/tmp.k32Y9mXGJH/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.oqRPmQqYPr ++ mktemp + local LAST_ERR=/tmp/tmp.VF4DwpJLX3 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.oqRPmQqYPr perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.VF4DwpJLX3 + rm /tmp/tmp.oqRPmQqYPr /tmp/tmp.VF4DwpJLX3 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ndoo62kgOM +++ mktemp ++ local LAST_ERR=/tmp/tmp.jINTyEMMfm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ndoo62kgOM ++ cat /tmp/tmp.jINTyEMMfm ++ rm /tmp/tmp.ndoo62kgOM /tmp/tmp.jINTyEMMfm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PKEKh0urj4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.GCLPFlALUn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PKEKh0urj4 ++ cat /tmp/tmp.GCLPFlALUn ++ rm /tmp/tmp.PKEKh0urj4 /tmp/tmp.GCLPFlALUn ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VsoSBkUL76 +++ mktemp ++ local LAST_ERR=/tmp/tmp.vKci0BC1NR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VsoSBkUL76 ++ cat /tmp/tmp.vKci0BC1NR ++ rm /tmp/tmp.VsoSBkUL76 /tmp/tmp.vKci0BC1NR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JBJm01tR7K +++ mktemp ++ local LAST_ERR=/tmp/tmp.4MA8r4Rh2C ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JBJm01tR7K ++ cat 
/tmp/tmp.4MA8r4Rh2C ++ rm /tmp/tmp.JBJm01tR7K /tmp/tmp.4MA8r4Rh2C ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bDrV2466AR +++ mktemp ++ local LAST_ERR=/tmp/tmp.GgrlZKR80f ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bDrV2466AR ++ cat /tmp/tmp.GgrlZKR80f ++ rm /tmp/tmp.bDrV2466AR /tmp/tmp.GgrlZKR80f ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Qfv4L9CHFk +++ mktemp ++ local LAST_ERR=/tmp/tmp.lsoBQPPW4c ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Qfv4L9CHFk ++ cat /tmp/tmp.lsoBQPPW4c ++ rm /tmp/tmp.Qfv4L9CHFk /tmp/tmp.lsoBQPPW4c ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wcpdw2x1rH +++ mktemp ++ local LAST_ERR=/tmp/tmp.5o1LNzLaEh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wcpdw2x1rH ++ cat /tmp/tmp.5o1LNzLaEh ++ rm /tmp/tmp.wcpdw2x1rH /tmp/tmp.5o1LNzLaEh ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zHzzpn7usR +++ mktemp ++ local LAST_ERR=/tmp/tmp.VG1sMphhul ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zHzzpn7usR ++ cat /tmp/tmp.VG1sMphhul ++ rm /tmp/tmp.zHzzpn7usR /tmp/tmp.VG1sMphhul ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 7 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8urNF6wDIU +++ mktemp ++ local LAST_ERR=/tmp/tmp.VlBQK9MHGh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8urNF6wDIU ++ cat /tmp/tmp.VlBQK9MHGh ++ rm /tmp/tmp.8urNF6wDIU /tmp/tmp.VlBQK9MHGh ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 8 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kfzgxRdg7A +++ mktemp ++ local LAST_ERR=/tmp/tmp.XmtPTFyMrl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 
'!=' 0 ']' ++ break ++ cat /tmp/tmp.kfzgxRdg7A ++ cat /tmp/tmp.XmtPTFyMrl ++ rm /tmp/tmp.kfzgxRdg7A /tmp/tmp.XmtPTFyMrl ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 9 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uPjKKoeA1T +++ mktemp ++ local LAST_ERR=/tmp/tmp.921jsTDB4G ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uPjKKoeA1T ++ cat /tmp/tmp.921jsTDB4G ++ rm /tmp/tmp.uPjKKoeA1T /tmp/tmp.921jsTDB4G ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 10 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6kUDGG86kI +++ mktemp ++ local LAST_ERR=/tmp/tmp.oFFLeR9yH4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6kUDGG86kI ++ cat /tmp/tmp.oFFLeR9yH4 ++ rm /tmp/tmp.6kUDGG86kI /tmp/tmp.oFFLeR9yH4 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RFoPIgFG89 +++ mktemp ++ local LAST_ERR=/tmp/tmp.20haCVn9zt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RFoPIgFG89 ++ cat /tmp/tmp.20haCVn9zt ++ rm /tmp/tmp.RFoPIgFG89 /tmp/tmp.20haCVn9zt ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.ciPRc6eG97 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.VtGX21Yt4p +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.ciPRc6eG97 +++++ cat /tmp/tmp.VtGX21Yt4p +++++ rm /tmp/tmp.ciPRc6eG97 /tmp/tmp.VtGX21Yt4p +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.ivA58s2X7b ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.bCl2SJeFpd +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.ivA58s2X7b +++++ cat /tmp/tmp.bCl2SJeFpd +++++ rm /tmp/tmp.ivA58s2X7b /tmp/tmp.bCl2SJeFpd +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.M6xK1ySFG0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.4KE67O84fp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat 
/tmp/tmp.M6xK1ySFG0 ++ cat /tmp/tmp.4KE67O84fp ++ rm /tmp/tmp.M6xK1ySFG0 /tmp/tmp.4KE67O84fp ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.oywDb1nGnJ ++ mktemp + local LAST_ERR=/tmp/tmp.U8PdieAxA6 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.oywDb1nGnJ secret/my-cluster-secrets patched + cat /tmp/tmp.U8PdieAxA6 + rm /tmp/tmp.oywDb1nGnJ /tmp/tmp.U8PdieAxA6 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VAy4SY74Gl +++ mktemp ++ local LAST_ERR=/tmp/tmp.HvceBKdq2u ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VAy4SY74Gl ++ cat /tmp/tmp.HvceBKdq2u ++ rm /tmp/tmp.VAy4SY74Gl /tmp/tmp.HvceBKdq2u ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.50iuosSq6m +++ mktemp ++ local LAST_ERR=/tmp/tmp.lUNjKSUHOz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.50iuosSq6m ++ cat /tmp/tmp.lUNjKSUHOz ++ rm /tmp/tmp.50iuosSq6m /tmp/tmp.lUNjKSUHOz ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lNvdNi9kOI +++ mktemp ++ local LAST_ERR=/tmp/tmp.BwLhKzqZGi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lNvdNi9kOI ++ cat /tmp/tmp.BwLhKzqZGi ++ rm /tmp/tmp.lNvdNi9kOI /tmp/tmp.BwLhKzqZGi ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.y5Wh6H2KAG ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.vgZO6lLbQ8 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.y5Wh6H2KAG +++++ cat /tmp/tmp.vgZO6lLbQ8 +++++ rm /tmp/tmp.y5Wh6H2KAG /tmp/tmp.vgZO6lLbQ8 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 
'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.uNqHdz6lWu ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.zS06fuXpAE +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.uNqHdz6lWu +++++ cat /tmp/tmp.zS06fuXpAE +++++ rm /tmp/tmp.uNqHdz6lWu /tmp/tmp.zS06fuXpAE +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2K9PpfQxGP +++ mktemp ++ local LAST_ERR=/tmp/tmp.VC23ALMm9Y ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2K9PpfQxGP ++ cat /tmp/tmp.VC23ALMm9Y ++ rm /tmp/tmp.2K9PpfQxGP /tmp/tmp.VC23ALMm9Y ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.k32Y9mXGJH/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/compare/select-2.sql /tmp/tmp.k32Y9mXGJH/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.k32Y9mXGJH/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/compare/select-2.sql /tmp/tmp.k32Y9mXGJH/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.k32Y9mXGJH/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/compare/select-2.sql /tmp/tmp.k32Y9mXGJH/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.1lGcyoJtFO ++ mktemp + local LAST_ERR=/tmp/tmp.5keKXrl3My + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1lGcyoJtFO perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.5keKXrl3My + rm /tmp/tmp.1lGcyoJtFO /tmp/tmp.5keKXrl3My + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.qMy3fniIgZ ++ mktemp + local LAST_ERR=/tmp/tmp.RKqutk3lU9 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.qMy3fniIgZ secret/my-cluster-secrets patched + cat /tmp/tmp.RKqutk3lU9 + rm /tmp/tmp.qMy3fniIgZ /tmp/tmp.RKqutk3lU9 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.P9T5ef1njK +++ mktemp ++ local LAST_ERR=/tmp/tmp.eZy2gTsszj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.P9T5ef1njK ++ cat /tmp/tmp.eZy2gTsszj ++ rm /tmp/tmp.P9T5ef1njK /tmp/tmp.eZy2gTsszj ++ return 0 + [[ 
initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4P7Y1xoSFY +++ mktemp ++ local LAST_ERR=/tmp/tmp.H27wwK83MR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4P7Y1xoSFY ++ cat /tmp/tmp.H27wwK83MR ++ rm /tmp/tmp.4P7Y1xoSFY /tmp/tmp.H27wwK83MR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oK07bS9YGC +++ mktemp ++ local LAST_ERR=/tmp/tmp.5OjZxn8YDi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oK07bS9YGC ++ cat /tmp/tmp.5OjZxn8YDi ++ rm /tmp/tmp.oK07bS9YGC /tmp/tmp.5OjZxn8YDi ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.djfsFcaCSc +++ mktemp ++ local LAST_ERR=/tmp/tmp.U8SaR4Y4YN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.djfsFcaCSc ++ cat /tmp/tmp.U8SaR4Y4YN ++ rm /tmp/tmp.djfsFcaCSc /tmp/tmp.U8SaR4Y4YN ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9OA3i7Yowo +++ mktemp ++ local LAST_ERR=/tmp/tmp.TfUUJgnY7H ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9OA3i7Yowo ++ cat /tmp/tmp.TfUUJgnY7H ++ rm /tmp/tmp.9OA3i7Yowo /tmp/tmp.TfUUJgnY7H ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Eu0cYgxxB7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.fMVuejMvgB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Eu0cYgxxB7 ++ cat /tmp/tmp.fMVuejMvgB ++ rm /tmp/tmp.Eu0cYgxxB7 /tmp/tmp.fMVuejMvgB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fxDu3u5fT7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.3jKokEaHga ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fxDu3u5fT7 ++ cat /tmp/tmp.3jKokEaHga ++ rm 
/tmp/tmp.fxDu3u5fT7 /tmp/tmp.3jKokEaHga ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ggd6m6LN5e +++ mktemp ++ local LAST_ERR=/tmp/tmp.vLhK4kJPO0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ggd6m6LN5e ++ cat /tmp/tmp.vLhK4kJPO0 ++ rm /tmp/tmp.ggd6m6LN5e /tmp/tmp.vLhK4kJPO0 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4JGrhVVkR8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.PjWTIOpGVm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4JGrhVVkR8 ++ cat /tmp/tmp.PjWTIOpGVm ++ rm /tmp/tmp.4JGrhVVkR8 /tmp/tmp.PjWTIOpGVm ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.BRei5KAG2n ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.WgBfJ1Sgbe +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.BRei5KAG2n +++++ cat /tmp/tmp.WgBfJ1Sgbe +++++ rm /tmp/tmp.BRei5KAG2n /tmp/tmp.WgBfJ1Sgbe +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.02SSyY7KsY ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.bXx6Kvu4ox +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.02SSyY7KsY +++++ cat /tmp/tmp.bXx6Kvu4ox +++++ rm /tmp/tmp.02SSyY7KsY /tmp/tmp.bXx6Kvu4ox +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XSWL9mslbc +++ mktemp ++ local LAST_ERR=/tmp/tmp.iYZpa4vvVv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XSWL9mslbc ++ cat /tmp/tmp.iYZpa4vvVv ++ rm /tmp/tmp.XSWL9mslbc /tmp/tmp.iYZpa4vvVv ++ return 0 + [[ 2 == \2 ]] + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 
-uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc
+ local 'command=SHOW DATABASES;'
+ local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\'''
+ local pod=some-name-pxc-0
+ local container_name=pxc
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.k32Y9mXGJH/select-3.sql ']'
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/compare/select-3.sql /tmp/tmp.k32Y9mXGJH/select-3.sql
+ desc 'test monitor'
+ set +o xtrace
-----------------------------------------------------------------------------------
test monitor
-----------------------------------------------------------------------------------
+ patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA==
+ local secret=my-cluster-secrets
+ local key=monitor
+ local value=dGVzdC1wYXNzd29yZA==
+ kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}'
++ mktemp
+ local LAST_OUT=/tmp/tmp.5GcTQGW71E
++ mktemp
+ local LAST_ERR=/tmp/tmp.xcPdHKAtAj
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}'
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.5GcTQGW71E
secret/my-cluster-secrets patched
+ cat /tmp/tmp.xcPdHKAtAj
+ rm /tmp/tmp.5GcTQGW71E /tmp/tmp.xcPdHKAtAj
+ return 0
+ wait_for_password_propagation my-cluster-secrets monitor
+ local secret=my-cluster-secrets
+ local user=monitor
+ local max_retry=240
++ getSecretData my-cluster-secrets root
++ local secretName=my-cluster-secrets
++ local dataKey=root
++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}'
++ base64 --decode
+++ mktemp
++ local LAST_OUT=/tmp/tmp.pIow2OtqGr
+++ mktemp
++ local LAST_ERR=/tmp/tmp.hCxcw4Ojmz
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.pIow2OtqGr
++ cat /tmp/tmp.hCxcw4Ojmz
++ rm /tmp/tmp.pIow2OtqGr /tmp/tmp.hCxcw4Ojmz
++ return 0
+ local root_pass=test-password
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]]
+ echo 'Skipping dual password feature doesn'\''t work for 5.7. PXC 5.7 doesn'\''t support it!'
Skipping dual password feature doesn't work for 5.7. PXC 5.7 doesn't support it!
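
# Both patch_secret and getSecretData, traced above, are thin wrappers around
# a base64 round-trip on the cluster Secret. A minimal sketch of the same
# steps with this run's values (dGVzdC1wYXNzd29yZA== decodes to "test-password"):

    enc=$(echo -n 'test-password' | base64)
    kubectl patch secret my-cluster-secrets -p "{\"data\":{\"monitor\": \"$enc\"}}"
    # read the value back the way getSecretData does:
    kubectl get secrets/my-cluster-secrets --template='{{.data.monitor}}' | base64 --decode
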
+ return + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.x1IIaERY91 +++ mktemp ++ local LAST_ERR=/tmp/tmp.yJ7kjUKmMX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.x1IIaERY91 ++ cat /tmp/tmp.yJ7kjUKmMX ++ rm /tmp/tmp.x1IIaERY91 /tmp/tmp.yJ7kjUKmMX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Xik2FxCafs +++ mktemp ++ local LAST_ERR=/tmp/tmp.aW8BGe6znw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Xik2FxCafs ++ cat /tmp/tmp.aW8BGe6znw ++ rm /tmp/tmp.Xik2FxCafs /tmp/tmp.aW8BGe6znw ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.b8ZUK8GhX4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.7RXgjIFZOA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.b8ZUK8GhX4 ++ cat /tmp/tmp.7RXgjIFZOA ++ rm /tmp/tmp.b8ZUK8GhX4 /tmp/tmp.7RXgjIFZOA ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.gfbYG7xPRs ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.fqvdO3bcQM +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.gfbYG7xPRs +++++ cat /tmp/tmp.fqvdO3bcQM +++++ rm /tmp/tmp.gfbYG7xPRs /tmp/tmp.fqvdO3bcQM +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.d3REFLHJ1G ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.X0pmUq8fly +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.d3REFLHJ1G +++++ cat /tmp/tmp.X0pmUq8fly +++++ rm /tmp/tmp.d3REFLHJ1G /tmp/tmp.X0pmUq8fly +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ebdaiV41qq +++ mktemp ++ local LAST_ERR=/tmp/tmp.aflktEkSpC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 
2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ebdaiV41qq ++ cat /tmp/tmp.aflktEkSpC ++ rm /tmp/tmp.ebdaiV41qq /tmp/tmp.aflktEkSpC ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.N4EA72EC2j +++ mktemp ++ local LAST_ERR=/tmp/tmp.ObFceWSU2n ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.N4EA72EC2j ++ cat /tmp/tmp.ObFceWSU2n ++ rm /tmp/tmp.N4EA72EC2j /tmp/tmp.ObFceWSU2n ++ return 0 + client_pod=pxc-client-64b479df95-7fssq + wait_pod pxc-client-64b479df95-7fssq + local pod=pxc-client-64b479df95-7fssq + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-7fssq ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-7fssq condition met pxc-client-64b479df95-7fssq.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.k32Y9mXGJH/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/compare/select-4.sql /tmp/tmp.k32Y9mXGJH/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.kEoExqqdPi ++ mktemp + local LAST_ERR=/tmp/tmp.y9bJsTSR0b + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.kEoExqqdPi secret/my-cluster-secrets patched + cat /tmp/tmp.y9bJsTSR0b + rm /tmp/tmp.kEoExqqdPi /tmp/tmp.y9bJsTSR0b + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fgLZGff9BS +++ mktemp ++ local LAST_ERR=/tmp/tmp.2Sn58vCQ0G ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fgLZGff9BS ++ cat /tmp/tmp.2Sn58vCQ0G ++ rm /tmp/tmp.fgLZGff9BS /tmp/tmp.2Sn58vCQ0G ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fRR9eHVRKz +++ mktemp ++ local LAST_ERR=/tmp/tmp.lMOEmXSjNl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fRR9eHVRKz ++ cat /tmp/tmp.lMOEmXSjNl ++ rm /tmp/tmp.fRR9eHVRKz /tmp/tmp.lMOEmXSjNl ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1q4Xp3QJmQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.W1hAu93FdV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1q4Xp3QJmQ ++ cat /tmp/tmp.W1hAu93FdV ++ rm /tmp/tmp.1q4Xp3QJmQ /tmp/tmp.W1hAu93FdV ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.C8E58EbNpa +++ mktemp ++ local LAST_ERR=/tmp/tmp.WM5FXuFqLW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break 
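
# run_mysql's body is hidden by 'set +o xtrace', but from the surrounding trace
# it reduces to: run the query from the pxc-client pod, capture stdout into
# /tmp/tmp.k32Y9mXGJH/select-4.sql, and diff that against the golden file.
# A plausible reconstruction (the exec line and mysql flags are assumptions,
# not taken from this log):

    client_pod=$(kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}')
    kubectl exec "$client_pod" -- mysql -sN -h some-name-proxysql -uoperator -p'test-password' -e 'SHOW TABLES;' \
        > /tmp/tmp.k32Y9mXGJH/select-4.sql
    diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/compare/select-4.sql \
        /tmp/tmp.k32Y9mXGJH/select-4.sql    # an empty diff is a pass
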
++ cat /tmp/tmp.C8E58EbNpa ++ cat /tmp/tmp.WM5FXuFqLW ++ rm /tmp/tmp.C8E58EbNpa /tmp/tmp.WM5FXuFqLW ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Zvk5dKgoT3 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.qErWiIZYJA +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Zvk5dKgoT3 +++++ cat /tmp/tmp.qErWiIZYJA +++++ rm /tmp/tmp.Zvk5dKgoT3 /tmp/tmp.qErWiIZYJA +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.qv9zYErzBC ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.pUMtF5opsv +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.qv9zYErzBC +++++ cat /tmp/tmp.pUMtF5opsv +++++ rm /tmp/tmp.qv9zYErzBC /tmp/tmp.pUMtF5opsv +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RR0UbfLrSv +++ mktemp ++ local LAST_ERR=/tmp/tmp.etNQeFbu45 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RR0UbfLrSv ++ cat /tmp/tmp.etNQeFbu45 ++ rm /tmp/tmp.RR0UbfLrSv /tmp/tmp.etNQeFbu45 ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dXX9zZo3CR +++ mktemp ++ local LAST_ERR=/tmp/tmp.YtXpEwSUv5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dXX9zZo3CR ++ cat /tmp/tmp.YtXpEwSUv5 ++ rm /tmp/tmp.dXX9zZo3CR /tmp/tmp.YtXpEwSUv5 ++ return 0 + client_pod=pxc-client-64b479df95-7fssq + wait_pod pxc-client-64b479df95-7fssq + local pod=pxc-client-64b479df95-7fssq + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-7fssq ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-7fssq condition met pxc-client-64b479df95-7fssq.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.k32Y9mXGJH/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/compare/select-4.sql /tmp/tmp.k32Y9mXGJH/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.dPBpfqr8KN ++ mktemp + local LAST_ERR=/tmp/tmp.pozsNMHQyp + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dPBpfqr8KN perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.pozsNMHQyp + rm /tmp/tmp.dPBpfqr8KN /tmp/tmp.pozsNMHQyp + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CUksuzBQ8D +++ mktemp ++ local LAST_ERR=/tmp/tmp.IcBytcn6WT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CUksuzBQ8D ++ cat /tmp/tmp.IcBytcn6WT ++ rm /tmp/tmp.CUksuzBQ8D /tmp/tmp.IcBytcn6WT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xjuwem2hG2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ac1gpuMT8a ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xjuwem2hG2 ++ cat /tmp/tmp.Ac1gpuMT8a ++ rm /tmp/tmp.xjuwem2hG2 /tmp/tmp.Ac1gpuMT8a ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ryKEZmu4c4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.dt4w7uPgOK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ryKEZmu4c4 ++ cat /tmp/tmp.dt4w7uPgOK ++ rm /tmp/tmp.ryKEZmu4c4 /tmp/tmp.dt4w7uPgOK ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xBblm6SAnA +++ mktemp ++ local LAST_ERR=/tmp/tmp.K9Y5OhABmg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break 
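
# get_proxy_engine, traced several times in this section, picks the proxy to
# inspect by reading two spec flags from the CR; a condensed equivalent of
# that engine choice:

    if [[ $(kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}') == "true" ]]; then
        echo haproxy
    elif [[ $(kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}') == "true" ]]; then
        echo proxysql
    fi
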
++ cat /tmp/tmp.xBblm6SAnA ++ cat /tmp/tmp.K9Y5OhABmg ++ rm /tmp/tmp.xBblm6SAnA /tmp/tmp.K9Y5OhABmg ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gzZp7nPrz5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.5itrmp1TgY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gzZp7nPrz5 ++ cat /tmp/tmp.5itrmp1TgY ++ rm /tmp/tmp.gzZp7nPrz5 /tmp/tmp.5itrmp1TgY ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iWymKFCaFD +++ mktemp ++ local LAST_ERR=/tmp/tmp.wdWLqnywPv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iWymKFCaFD ++ cat /tmp/tmp.wdWLqnywPv ++ rm /tmp/tmp.iWymKFCaFD /tmp/tmp.wdWLqnywPv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pq1otTlit3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.svfxTag0Kq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pq1otTlit3 ++ cat /tmp/tmp.svfxTag0Kq ++ rm /tmp/tmp.pq1otTlit3 /tmp/tmp.svfxTag0Kq ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sNWBhEN5s8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.GksRGciAAO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sNWBhEN5s8 ++ cat /tmp/tmp.GksRGciAAO ++ rm /tmp/tmp.sNWBhEN5s8 /tmp/tmp.GksRGciAAO ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.XSDvLTBdxr ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.4q1Dgi0Iov +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.XSDvLTBdxr +++++ cat /tmp/tmp.4q1Dgi0Iov +++++ rm /tmp/tmp.XSDvLTBdxr /tmp/tmp.4q1Dgi0Iov +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.nmNBbttXP9 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.pMtOYHe6Qi +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.nmNBbttXP9 +++++ cat 
/tmp/tmp.pMtOYHe6Qi +++++ rm /tmp/tmp.nmNBbttXP9 /tmp/tmp.pMtOYHe6Qi +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CGfoAKhWkE +++ mktemp ++ local LAST_ERR=/tmp/tmp.7inRhIUdaG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CGfoAKhWkE ++ cat /tmp/tmp.7inRhIUdaG ++ rm /tmp/tmp.CGfoAKhWkE /tmp/tmp.7inRhIUdaG ++ return 0 + [[ 2 == \2 ]] + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.RTIKrsc00t ++ mktemp + local LAST_ERR=/tmp/tmp.7s5dk5JObk + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.RTIKrsc00t secret/my-cluster-secrets-2 patched + cat /tmp/tmp.7s5dk5JObk + rm /tmp/tmp.RTIKrsc00t /tmp/tmp.7s5dk5JObk + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3W0yVjY87b +++ mktemp ++ local LAST_ERR=/tmp/tmp.LNuAECN562 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3W0yVjY87b ++ cat /tmp/tmp.LNuAECN562 ++ rm /tmp/tmp.3W0yVjY87b /tmp/tmp.LNuAECN562 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CFpyTPjxdj +++ mktemp ++ local LAST_ERR=/tmp/tmp.O97FqYaoMR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CFpyTPjxdj ++ cat /tmp/tmp.O97FqYaoMR ++ rm /tmp/tmp.CFpyTPjxdj /tmp/tmp.O97FqYaoMR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2Rbzjn0EJr +++ mktemp ++ local LAST_ERR=/tmp/tmp.TYik6JERoD ++ local exit_status=0 
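
# wait_cluster_consistency, which accounts for most of this log, is a bounded
# poll of the CR status: up to 36 probes, 20 seconds apart, until .status.state
# reports "ready", after which the pxc and proxy ready counts are compared
# against the expected sizes. A condensed, hypothetical sketch of the loop:

    wait_ready() {
        local cluster=$1 i=0 max=36
        sleep 7
        until [[ $(kubectl get pxc "$cluster" -o 'jsonpath={.status.state}') == "ready" ]]; do
            echo 'waiting for cluster readiness'
            [[ $((i++)) -ge $max ]] && return 1
            sleep 20
        done
    }
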
+++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2Rbzjn0EJr ++ cat /tmp/tmp.TYik6JERoD ++ rm /tmp/tmp.2Rbzjn0EJr /tmp/tmp.TYik6JERoD ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Bl82hR1QPy +++ mktemp ++ local LAST_ERR=/tmp/tmp.IlliHDifKK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Bl82hR1QPy ++ cat /tmp/tmp.IlliHDifKK ++ rm /tmp/tmp.Bl82hR1QPy /tmp/tmp.IlliHDifKK ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.5eNulSXzHL ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.P48SKtTxQr +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.5eNulSXzHL +++++ cat /tmp/tmp.P48SKtTxQr +++++ rm /tmp/tmp.5eNulSXzHL /tmp/tmp.P48SKtTxQr +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.HhDCHaLowZ ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.EhQRgZYWu9 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.HhDCHaLowZ +++++ cat /tmp/tmp.EhQRgZYWu9 +++++ rm /tmp/tmp.HhDCHaLowZ /tmp/tmp.EhQRgZYWu9 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YYXUv6j8kn +++ mktemp ++ local LAST_ERR=/tmp/tmp.nUAC6zdzw8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YYXUv6j8kn ++ cat /tmp/tmp.nUAC6zdzw8 ++ rm /tmp/tmp.YYXUv6j8kn /tmp/tmp.nUAC6zdzw8 ++ return 0 + [[ 2 == \2 ]] + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nSEGGNtYu9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.OhE0inIKb7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods 
--selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nSEGGNtYu9 ++ cat /tmp/tmp.OhE0inIKb7 ++ rm /tmp/tmp.nSEGGNtYu9 /tmp/tmp.OhE0inIKb7 ++ return 0 + client_pod=pxc-client-64b479df95-7fssq + wait_pod pxc-client-64b479df95-7fssq + local pod=pxc-client-64b479df95-7fssq + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-7fssq ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-7fssq condition met pxc-client-64b479df95-7fssq.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.k32Y9mXGJH/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/compare/select-4.sql /tmp/tmp.k32Y9mXGJH/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ipu0A7gbbZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.K2LueFZUBe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Ipu0A7gbbZ ++ cat /tmp/tmp.K2LueFZUBe ++ rm /tmp/tmp.Ipu0A7gbbZ /tmp/tmp.K2LueFZUBe ++ return 0 + newpass='uYm,RmJ%QS,Lsh}wW!0' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''uYm,RmJ%QS,Lsh}wW!0'\'';' '-h some-name-pxc -uroot -p'\''uYm,RmJ%QS,Lsh}wW!0'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''uYm,RmJ%QS,Lsh}wW!0'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''uYm,RmJ%QS,Lsh}wW!0'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KX8cJHihjS +++ mktemp ++ local LAST_ERR=/tmp/tmp.HMFC4e8auO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KX8cJHihjS ++ cat /tmp/tmp.HMFC4e8auO ++ rm /tmp/tmp.KX8cJHihjS /tmp/tmp.HMFC4e8auO ++ return 0 + client_pod=pxc-client-64b479df95-7fssq + wait_pod pxc-client-64b479df95-7fssq + local pod=pxc-client-64b479df95-7fssq + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-7fssq ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-7fssq condition met pxc-client-64b479df95-7fssq.Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''uYm,RmJ%QS,Lsh}wW!0'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''uYm,RmJ%QS,Lsh}wW!0'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''uYm,RmJ%QS,Lsh}wW!0'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql 
-utestsync -p'\''uYm,RmJ%QS,Lsh}wW!0'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sNqL9QfQx8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.WDTpzKZOug ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sNqL9QfQx8 ++ cat /tmp/tmp.WDTpzKZOug ++ rm /tmp/tmp.sNqL9QfQx8 /tmp/tmp.WDTpzKZOug ++ return 0 + client_pod=pxc-client-64b479df95-7fssq + wait_pod pxc-client-64b479df95-7fssq + local pod=pxc-client-64b479df95-7fssq + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-7fssq ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-7fssq condition met pxc-client-64b479df95-7fssq.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.k32Y9mXGJH/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/compare/select-4.sql /tmp/tmp.k32Y9mXGJH/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.aMJoxPGAc1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.GY8yPJRZtq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aMJoxPGAc1 ++ cat /tmp/tmp.GY8yPJRZtq ++ rm /tmp/tmp.aMJoxPGAc1 /tmp/tmp.GY8yPJRZtq ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.3Vbyfyu0dC ++ mktemp + local LAST_ERR=/tmp/tmp.FQQ2MUyyua + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.3Vbyfyu0dC secret/my-cluster-secrets-2 configured + cat /tmp/tmp.FQQ2MUyyua Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
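
# The "missing last-applied-configuration" warning above is benign here: the
# Secret was first created without 'kubectl apply', so apply has no recorded
# previous state to diff against and patches the annotation in on the fly.
# Creating the object with --save-config up front avoids the warning on later
# applies:

    kubectl create --save-config -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/conf/secrets.yml
    kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/conf/secrets.yml
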
+ rm /tmp/tmp.3Vbyfyu0dC /tmp/tmp.FQQ2MUyyua
+ return 0
+ sleep 15
+ compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\'''
+ local command_id=select-4
+ local 'command=SHOW TABLES;'
+ local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\'''
+ local postfix=
+ local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/compare/select-4.sql
+ [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]]
+ run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\'''
+ local 'command=SHOW TABLES;'
+ local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\'''
++ get_client_pod
++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
+++ mktemp
++ local LAST_OUT=/tmp/tmp.kMAlxTP7JK
+++ mktemp
++ local LAST_ERR=/tmp/tmp.lJO5f2MJwE
++ local exit_status=0
+++ seq 0 2
++ for i in '$(seq 0 2)'
++ set +e
++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}'
++ exit_status=0
++ set -e
++ '[' 0 '!=' 0 ']'
++ break
++ cat /tmp/tmp.kMAlxTP7JK
++ cat /tmp/tmp.lJO5f2MJwE
++ rm /tmp/tmp.kMAlxTP7JK /tmp/tmp.lJO5f2MJwE
++ return 0
+ client_pod=pxc-client-64b479df95-7fssq
+ wait_pod pxc-client-64b479df95-7fssq
+ local pod=pxc-client-64b479df95-7fssq
+ local max_retry=480
+ local ns=
++ echo pxc-client-64b479df95-7fssq
++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/'
++ egrep '^(pxc|proxysql)$'
+ local container=
+ set +o xtrace
pod/pxc-client-64b479df95-7fssq condition met
pxc-client-64b479df95-7fssq.Ok
+ set +o xtrace
+ '[' '!' -s /tmp/tmp.k32Y9mXGJH/select-4.sql ']'
+ diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/compare/select-4.sql /tmp/tmp.k32Y9mXGJH/select-4.sql
+ newpass=test-password2
++ echo -n test-password2
++ base64
+ newpassencrypted=dGVzdC1wYXNzd29yZDI=
+ apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/conf/some-name.yml
+ '[' -z '' ']'
+ cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/conf/some-name.yml
+ kubectl_bin apply -f -
+ cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/conf/some-name.yml
++ mktemp
+ /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#'
+ /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1735-1d5766ea#'
+ local LAST_OUT=/tmp/tmp.Yy3Tvm9ShC
+ /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#'
+ /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#'
+ /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#'
+ /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#'
+ /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#'
+ /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-11047~
+ /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#'
+ /usr/bin/sed -e 's#apply:.*#apply: Never#'
+ /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#'
++ mktemp
+ local LAST_ERR=/tmp/tmp.d1cIPPt0o2
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl apply -f -
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.Yy3Tvm9ShC
perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.d1cIPPt0o2 + rm /tmp/tmp.Yy3Tvm9ShC /tmp/tmp.d1cIPPt0o2 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.m4fdCPRNLa +++ mktemp ++ local LAST_ERR=/tmp/tmp.GEAjNz68FP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.m4fdCPRNLa ++ cat /tmp/tmp.GEAjNz68FP ++ rm /tmp/tmp.m4fdCPRNLa /tmp/tmp.GEAjNz68FP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7gmxJUACDi +++ mktemp ++ local LAST_ERR=/tmp/tmp.zegZDkLVtu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7gmxJUACDi ++ cat /tmp/tmp.zegZDkLVtu ++ rm /tmp/tmp.7gmxJUACDi /tmp/tmp.zegZDkLVtu ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PDDg5QFJSh +++ mktemp ++ local LAST_ERR=/tmp/tmp.mK0ov8deeg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PDDg5QFJSh ++ cat /tmp/tmp.mK0ov8deeg ++ rm /tmp/tmp.PDDg5QFJSh /tmp/tmp.mK0ov8deeg ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CQvK8vEqle +++ mktemp ++ local LAST_ERR=/tmp/tmp.UJoyx0GvpO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CQvK8vEqle ++ cat /tmp/tmp.UJoyx0GvpO ++ rm /tmp/tmp.CQvK8vEqle /tmp/tmp.UJoyx0GvpO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.w7URtff3qi +++ mktemp ++ local LAST_ERR=/tmp/tmp.D7YmjXIRhO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.w7URtff3qi ++ cat /tmp/tmp.D7YmjXIRhO ++ rm /tmp/tmp.w7URtff3qi /tmp/tmp.D7YmjXIRhO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 
]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MbfQU4k21D +++ mktemp ++ local LAST_ERR=/tmp/tmp.2LXfUIjwde ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MbfQU4k21D ++ cat /tmp/tmp.2LXfUIjwde ++ rm /tmp/tmp.MbfQU4k21D /tmp/tmp.2LXfUIjwde ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XOXBWVn0B9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.noytEvO10d ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XOXBWVn0B9 ++ cat /tmp/tmp.noytEvO10d ++ rm /tmp/tmp.XOXBWVn0B9 /tmp/tmp.noytEvO10d ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UrWEZGWY8H +++ mktemp ++ local LAST_ERR=/tmp/tmp.77ZNHTqZwy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UrWEZGWY8H ++ cat /tmp/tmp.77ZNHTqZwy ++ rm /tmp/tmp.UrWEZGWY8H /tmp/tmp.77ZNHTqZwy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 7 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FrtX65U6OU +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ah1weLYIb8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FrtX65U6OU ++ cat /tmp/tmp.Ah1weLYIb8 ++ rm /tmp/tmp.FrtX65U6OU /tmp/tmp.Ah1weLYIb8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 8 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.V8ZruSfJRa +++ mktemp ++ local LAST_ERR=/tmp/tmp.6e70D4z251 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.V8ZruSfJRa ++ cat /tmp/tmp.6e70D4z251 ++ rm /tmp/tmp.V8ZruSfJRa /tmp/tmp.6e70D4z251 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 9 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4mDYznBRIj +++ mktemp ++ local LAST_ERR=/tmp/tmp.dL0bwSFcNe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4mDYznBRIj ++ cat /tmp/tmp.dL0bwSFcNe ++ rm /tmp/tmp.4mDYznBRIj /tmp/tmp.dL0bwSFcNe ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' 
waiting for cluster readyness + sleep 20 + [[ 10 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bZ9s2x84jB +++ mktemp ++ local LAST_ERR=/tmp/tmp.0AuSRZXFKh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bZ9s2x84jB ++ cat /tmp/tmp.0AuSRZXFKh ++ rm /tmp/tmp.bZ9s2x84jB /tmp/tmp.0AuSRZXFKh ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 11 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QkRAkpDSWN +++ mktemp ++ local LAST_ERR=/tmp/tmp.0MwIqIMKTq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QkRAkpDSWN ++ cat /tmp/tmp.0MwIqIMKTq ++ rm /tmp/tmp.QkRAkpDSWN /tmp/tmp.0MwIqIMKTq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 12 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YUjKpgUXll +++ mktemp ++ local LAST_ERR=/tmp/tmp.K9XqNkBltD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YUjKpgUXll ++ cat /tmp/tmp.K9XqNkBltD ++ rm /tmp/tmp.YUjKpgUXll /tmp/tmp.K9XqNkBltD ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UDa9vj8ALs +++ mktemp ++ local LAST_ERR=/tmp/tmp.QEIsLw9kR5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UDa9vj8ALs ++ cat /tmp/tmp.QEIsLw9kR5 ++ rm /tmp/tmp.UDa9vj8ALs /tmp/tmp.QEIsLw9kR5 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.gzTKzJLZ7S ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ENN9L2LPZO +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.gzTKzJLZ7S +++++ cat /tmp/tmp.ENN9L2LPZO +++++ rm /tmp/tmp.gzTKzJLZ7S /tmp/tmp.ENN9L2LPZO +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WrHo7cDyKj +++ mktemp ++ local LAST_ERR=/tmp/tmp.6kKRkxu8da ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WrHo7cDyKj ++ cat /tmp/tmp.6kKRkxu8da ++ rm /tmp/tmp.WrHo7cDyKj /tmp/tmp.6kKRkxu8da ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets monitor 
dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.qfTD3qBqxt ++ mktemp + local LAST_ERR=/tmp/tmp.SiG4gIdbQW + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.qfTD3qBqxt secret/my-cluster-secrets patched + cat /tmp/tmp.SiG4gIdbQW + rm /tmp/tmp.qfTD3qBqxt /tmp/tmp.SiG4gIdbQW + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5NvmNhJX5I +++ mktemp ++ local LAST_ERR=/tmp/tmp.8OnRwO55QT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5NvmNhJX5I ++ cat /tmp/tmp.8OnRwO55QT ++ rm /tmp/tmp.5NvmNhJX5I /tmp/tmp.8OnRwO55QT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vQTY7eM6q8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ngALtkAby8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vQTY7eM6q8 ++ cat /tmp/tmp.ngALtkAby8 ++ rm /tmp/tmp.vQTY7eM6q8 /tmp/tmp.ngALtkAby8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EHKKOTMCoU +++ mktemp ++ local LAST_ERR=/tmp/tmp.uTqXj441hu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EHKKOTMCoU ++ cat /tmp/tmp.uTqXj441hu ++ rm /tmp/tmp.EHKKOTMCoU /tmp/tmp.uTqXj441hu ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.B4ph6Sag5H +++ mktemp ++ local LAST_ERR=/tmp/tmp.WyuCSeD4g2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.B4ph6Sag5H ++ cat /tmp/tmp.WyuCSeD4g2 ++ rm /tmp/tmp.B4ph6Sag5H /tmp/tmp.WyuCSeD4g2 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fD3byeQgZp +++ mktemp ++ local LAST_ERR=/tmp/tmp.YpS4aEroph ++ local exit_status=0 +++ 
seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fD3byeQgZp ++ cat /tmp/tmp.YpS4aEroph ++ rm /tmp/tmp.fD3byeQgZp /tmp/tmp.YpS4aEroph ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.kxjLJGQ2rs ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.V3uiVi3Poi +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.kxjLJGQ2rs +++++ cat /tmp/tmp.V3uiVi3Poi +++++ rm /tmp/tmp.kxjLJGQ2rs /tmp/tmp.V3uiVi3Poi +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uKesrmw9pm +++ mktemp ++ local LAST_ERR=/tmp/tmp.GIEAa7TEog ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uKesrmw9pm ++ cat /tmp/tmp.GIEAa7TEog ++ rm /tmp/tmp.uKesrmw9pm /tmp/tmp.GIEAa7TEog ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2i5sks6Woi +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ddd3aDLYx4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2i5sks6Woi ++ cat /tmp/tmp.Ddd3aDLYx4 ++ rm /tmp/tmp.2i5sks6Woi /tmp/tmp.Ddd3aDLYx4 ++ return 0 + client_pod=pxc-client-64b479df95-7fssq + wait_pod pxc-client-64b479df95-7fssq + local pod=pxc-client-64b479df95-7fssq + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-7fssq ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-7fssq condition met pxc-client-64b479df95-7fssq.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.k32Y9mXGJH/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1735/e2e-tests/users/compare/select-3.sql /tmp/tmp.k32Y9mXGJH/select-3.sql + destroy users-11047 + local namespace=users-11047 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info ++ get_operator_pod + grep -v 'get backup status: Job.batch' ++ local label_prefix=app.kubernetes.io/ + sort -u + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator + grep -v 'the object has been modified' + tee /tmp/tmp.k32Y9mXGJH/operator.log +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.ESdd2wzU2F +++ mktemp ++ local LAST_ERR=/tmp/tmp.xuFpneumap ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ESdd2wzU2F ++ cat /tmp/tmp.xuFpneumap ++ rm /tmp/tmp.ESdd2wzU2F /tmp/tmp.xuFpneumap ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-74f48b466f-gc747 ++ mktemp + local LAST_OUT=/tmp/tmp.FAo2XdmoRV ++ mktemp + local LAST_ERR=/tmp/tmp.f0mTSjSqpD + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-74f48b466f-gc747 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.FAo2XdmoRV + cat /tmp/tmp.f0mTSjSqpD + rm /tmp/tmp.FAo2XdmoRV /tmp/tmp.f0mTSjSqpD + return 0 2024-06-24T16:43:39.961Z INFO setup Manager starting up {"gitCommit": "1d5766ea51b75d548461d608daeac472f86e89a0", "gitBranch": "PR-1735-1d5766ea", "buildTime": "2024-06-24T14:34:44Z", "goVersion": "go1.22.4", "os": "linux", "arch": "amd64"} 2024-06-24T16:43:39.961Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.26.15-gke.1436000"} 2024-06-24T16:43:39.963Z INFO setup Registering Components. 2024-06-24T16:43:44.843Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2024-06-24T16:43:44.847Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2024-06-24T16:43:44.847Z INFO controller-runtime.metrics Starting metrics server 2024-06-24T16:43:44.847Z INFO controller-runtime.webhook Starting webhook server 2024-06-24T16:43:44.847Z INFO setup Starting the Cmd. 2024-06-24T16:43:44.847Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2024-06-24T16:43:44.848Z INFO controller-runtime.certwatcher Starting certificate watcher 2024-06-24T16:43:44.848Z INFO controller-runtime.certwatcher Updated current TLS certificate 2024-06-24T16:43:44.848Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2024-06-24T16:43:44.949Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 
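The repeated mktemp/seq/cat fragments throughout this trace all come from a single retry wrapper around kubectl. A minimal sketch of that wrapper, reconstructed from the expansions above — the three-attempt loop, temp-file capture, and output replay match the trace, while the back-off between failed attempts and the handling of a final failure are assumptions:

    kubectl_bin() {
        local LAST_OUT LAST_ERR
        local exit_status=0
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do                    # up to three attempts, as in the trace
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ "$exit_status" != 0 ]; then       # on failure, wait and retry (back-off is an assumption)
                sleep $((i + 1))
                continue
            fi
            break
        done
        cat "$LAST_OUT"                            # replay captured stdout, then stderr, as the trace shows
        cat "$LAST_ERR" >&2
        rm "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }

The password-rotation step this test exercises is likewise visible in the trace: patch the base64-encoded value into the users secret, then poll the custom resource until the operator reports the cluster ready again. A condensed sketch under the same caveat (the jsonpath, loop bound, and sleep interval match the trace; the failure handling is an assumption):

    # "dGVzdC1wYXNzd29yZDI=" is base64 for "test-password2", the password the
    # later mysql checks log in with
    kubectl_bin patch secret my-cluster-secrets \
        -p='{"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}'

    i=0
    until [ "$(kubectl_bin get pxc some-name -o 'jsonpath={.status.state}')" = "ready" ]; do
        echo 'waiting for cluster readyness'       # message verbatim from the suite
        sleep 20
        [ "$i" -ge 36 ] && exit 1                  # give up after ~12 minutes
        let i+=1
    done
    # the suite then also checks .status.pxc.ready and .status.haproxy.ready
    # against the expected pod counts (3 and 3 here)

Once the state is ready again, the trace above verifies the new credentials end to end with compare_mysql_cmd: SHOW DATABASES as the monitor user through HAProxy, diffed against the expected select-3.sql output.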
2024-06-24T16:44:01.293Z DEBUG events percona-xtradb-cluster-operator-74f48b466f-gc747_9165e87d-ea09-44fa-97e0-21d9b103666c became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"607aece7-9304-4769-a935-47a39ece4e0b","apiVersion":"coordination.k8s.io/v1","resourceVersion":"69835"}, "reason": "LeaderElection"} 2024-06-24T16:44:01.293Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"} 2024-06-24T16:44:01.293Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2024-06-24T16:44:01.295Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2024-06-24T16:44:01.295Z INFO Starting Controller {"controller": "pxc-controller"} 2024-06-24T16:44:01.295Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2024-06-24T16:44:01.300Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2024-06-24T16:44:01.300Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2024-06-24T16:44:01.509Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2024-06-24T16:44:01.509Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2024-06-24T16:44:01.509Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2024-06-24T16:44:12.380Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "0da43586-386a-4bb0-8f03-751baf1adeae", "err": "get primary pxc pod: not found"} 2024-06-24T16:44:22.589Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c6eb9dba-57d9-48a1-b0fd-3393286a1025", "err": "get primary pxc pod: not found"} 2024-06-24T16:44:27.876Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "0eb9bdd3-d9f8-4718-9490-2bc7e19a13ed", "err": "get primary pxc pod: not found"} 2024-06-24T16:45:05.054Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "076ee301-fb8c-459a-ab58-1cf6f7125a96", "err": "get primary pxc pod: not found"} 2024-06-24T16:45:05.391Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "f7159545-99a4-42ce-a2a5-31f44e65542b", "err": "get primary pxc pod: not found"} 2024-06-24T16:45:05.667Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "7deb67c5-2957-41c5-9651-7ffebafdd366", "err": "get primary pxc pod: not found"} 2024-06-24T16:45:10.335Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "bdad4c70-07b9-4bd2-a477-22c88f5e611c", "err": "get primary pxc pod: not found"} 2024-06-24T16:45:15.484Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c9ada7cb-4097-4bc5-9cb7-43f3764efdd1", "err": "get primary pxc pod: not found"} 2024-06-24T16:45:15.643Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": 
"fef5ed15-c880-4474-b9d2-4761b5e9a896", "err": "get primary pxc pod: not found"} 2024-06-24T16:45:20.664Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "5fffa823-3813-4ab9-9a66-ed0d529457a4", "err": "get primary pxc pod: not found"} 2024-06-24T16:45:25.827Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "65ba899c-21aa-406d-824f-09a99f6abca8", "err": "get primary pxc pod: not found"} 2024-06-24T16:45:26.447Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "011eecbe-6bbe-4732-bb3e-78b1442c939e", "err": "get primary pxc pod: not found"} 2024-06-24T16:45:26.890Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c8d53f5b-0bb8-439b-bed4-788e29c2ebea", "err": "get primary pxc pod: not found"} 2024-06-24T16:45:31.088Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "f2574a66-a369-425d-affb-734565a7f731", "err": "get primary pxc pod: not found"} 2024-06-24T16:45:36.291Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "e511a265-ad7b-41f3-957a-af0af5d72742", "err": "get primary pxc pod: not found"} 2024-06-24T16:45:41.726Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "03c79641-148b-4d34-bff4-2eee2c13b40f", "err": "get primary pxc pod: not found"} 2024-06-24T16:45:48.203Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "48804434-bdc0-4e80-b105-d73bc64f3a47", "err": "get primary pxc pod: not found"} 2024-06-24T16:45:52.934Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "2e03c8a6-8a4a-4e5c-8537-71a45be5a963", "err": "get primary pxc pod: not found"} 2024-06-24T16:46:03.364Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "e454456c-5ddf-487a-8765-d3ef5358ad48", "err": "get primary pxc pod: not found"} 2024-06-24T16:46:24.562Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "d31bf77c-f34d-448c-a490-914303b312a4", "err": "get primary pxc pod: not found"} 2024-06-24T16:46:40.067Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "7bf39b26-216f-4485-8736-c33fbbcceb44", "err": "get primary pxc pod: not found"} 2024-06-24T16:47:01.060Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "340bc326-73e8-4e4d-850a-353be22bdca1", "err": "get primary pxc pod: not found"} 2024-06-24T16:47:17.716Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "6121df8c-2723-44c4-aa63-ea5502bfcbee", "err": "get primary pxc pod: not found"} 2024-06-24T16:47:18.138Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": 
"817dfe4a-084e-4b7b-8969-596c4973b189", "err": "get primary pxc pod: not found"} 2024-06-24T16:47:22.178Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "817dfe4a-084e-4b7b-8969-596c4973b189", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-24T16:47:26.991Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c1a5f351-db8c-4b42-bd85-2ca1d99a1352", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-24T16:47:28.222Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "cdeafcb0-0b63-4fff-8c03-b815e1e991fb", "user": "proxyadmin"} 2024-06-24T16:47:28.222Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "cdeafcb0-0b63-4fff-8c03-b815e1e991fb", "user": "proxyadmin"} 2024-06-24T16:47:28.295Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "cdeafcb0-0b63-4fff-8c03-b815e1e991fb", "user": "proxyadmin"} 2024-06-24T16:47:28.308Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "cdeafcb0-0b63-4fff-8c03-b815e1e991fb", "user": "proxyadmin"} 2024-06-24T16:47:28.308Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "cdeafcb0-0b63-4fff-8c03-b815e1e991fb", "last-applied-secret": "1a55867d0f4fa0d0643653b2eb57b2f690315d8d843a10599c9839c2a779a95f"} 2024-06-24T16:47:28.627Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "cdeafcb0-0b63-4fff-8c03-b815e1e991fb", "error": "exec syncusers: command terminated with exit code 1 / / 
ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-24T16:47:54.495Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "5854792d-08a9-47fb-907f-0ef1e2d56f29", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-24T16:48:00.787Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "6eb1bf06-a0b1-44a3-b08e-a30ed05a3376", "error": "exec syncusers: command terminated with exit code 1 / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-11047.svc.cluster.local:3306) to ProxySQL\nAdding user to ProxySQL: replication\n / ERROR 1045 (28000) at line 1: ProxySQL Admin Error: UNIQUE constraint failed: mysql_users.username, mysql_users.frontend\nERROR (line:715) : Failed to add the user (replication) from PXC to ProxySQL database. \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-11047.svc.cluster.local:3306) to ProxySQL\nAdding user to ProxySQL: replication\n / ERROR 1045 (28000) at line 1: ProxySQL Admin Error: UNIQUE constraint failed: mysql_users.username, mysql_users.frontend\nERROR (line:715) : Failed to add the user (replication) from PXC to ProxySQL database. 
\n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-24T16:48:11.454Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "abd74fe9-18e1-43d7-8fc8-dc879c7b231c"} 2024-06-24T16:48:17.716Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "298289ea-bd2f-4fff-bf1d-3a2327db5443", "user": "xtrabackup"} 2024-06-24T16:48:17.746Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "298289ea-bd2f-4fff-bf1d-3a2327db5443", "user": "xtrabackup"} 2024-06-24T16:48:17.761Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "298289ea-bd2f-4fff-bf1d-3a2327db5443", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-24T16:48:17.771Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "298289ea-bd2f-4fff-bf1d-3a2327db5443", "user": "xtrabackup"} 2024-06-24T16:48:17.771Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "298289ea-bd2f-4fff-bf1d-3a2327db5443", "last-applied-secret": "d11bcc70ca53ea410cd455e13d82e7225917b0861d9b677ed34c02c5f6fbaf1b"} 2024-06-24T16:49:58.239Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "afd3ec81-ec5f-4509-b7e2-a7266c2a88f8", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-11047 on 10.123.240.10:53: no such host"} 2024-06-24T16:50:03.256Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "803b7652-663b-4f9d-8e29-34261a104af2", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-11047 on 10.123.240.10:53: no such host"} 2024-06-24T16:50:45.748Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "6f2c0982-e90d-4e8e-97bb-479a6cdc7f8e", "primary name": "some-name-pxc-0.some-name-pxc.users-11047.svc.cluster.local"} 2024-06-24T16:50:54.533Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "11b12018-ef78-44e3-8cdb-87fb899a7711"} 2024-06-24T16:50:59.514Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "970a8eb6-737a-4e6e-967c-27e48f5edaf5"} 2024-06-24T16:51:04.826Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "88bfff44-0f53-4a67-807f-0f3d046201f9"} 2024-06-24T16:51:10.054Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "dec598d2-5e14-4c1b-9921-0849a8e35940"} 2024-06-24T16:51:11.967Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "82b4d7f9-5440-4b9d-879e-a2e4173673c6", "user": "monitor"} 2024-06-24T16:51:11.998Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "82b4d7f9-5440-4b9d-879e-a2e4173673c6", "user": "monitor"} 2024-06-24T16:51:12.010Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "82b4d7f9-5440-4b9d-879e-a2e4173673c6", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-24T16:51:12.060Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "82b4d7f9-5440-4b9d-879e-a2e4173673c6", "user": "monitor"} 2024-06-24T16:51:12.075Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "82b4d7f9-5440-4b9d-879e-a2e4173673c6", "user": "monitor"} 2024-06-24T16:51:12.075Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "82b4d7f9-5440-4b9d-879e-a2e4173673c6", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"} 2024-06-24T16:51:15.268Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "82b4d7f9-5440-4b9d-879e-a2e4173673c6", "error": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-24T16:51:39.461Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "db79859c-5db6-4b02-8030-4d2a56559cc5"} 2024-06-24T16:51:47.357Z DEBUG 
PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "5fbc05fc-2437-49f7-b6f5-a01ee6aee09e"} 2024-06-24T16:51:52.445Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "3eae195d-2401-4f74-9ea0-e8347d48933b"} 2024-06-24T16:51:57.750Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "35f7441c-9801-4682-88b9-58d969b29d09"} 2024-06-24T16:51:59.513Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "bb483b9a-9f67-4344-b85e-c9c48cdb8d89", "user": "operator"} 2024-06-24T16:51:59.542Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "bb483b9a-9f67-4344-b85e-c9c48cdb8d89", "user": "operator"} 2024-06-24T16:51:59.552Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "bb483b9a-9f67-4344-b85e-c9c48cdb8d89", "secret": "some-name-mysql-init", "user": "operator"} 2024-06-24T16:51:59.568Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "bb483b9a-9f67-4344-b85e-c9c48cdb8d89", "user": "operator"} 2024-06-24T16:51:59.568Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "bb483b9a-9f67-4344-b85e-c9c48cdb8d89", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-06-24T16:52:01.008Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "bb483b9a-9f67-4344-b85e-c9c48cdb8d89", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-11047.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-11047.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-11047.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-11047.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-11047.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-11047.svc.' 
(using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-24T16:52:45.515Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "01837eec-b7cf-4595-a469-09f41e7a5bd2"} 2024-06-24T16:52:53.312Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "76a0c645-f049-4c34-9e36-bae48b7f7072"} 2024-06-24T16:52:58.512Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "a9687005-7f51-4f88-ad69-36328e416735"} 2024-06-24T16:53:03.554Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "3912bc6f-a8c5-4a25-9bff-26856c735774"} 2024-06-24T16:53:08.827Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "a7cd20d3-4385-4245-99f3-4decf6069f92"} 2024-06-24T16:53:14.026Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "789776ef-2861-4b3d-a562-f79740e0e11a"} 2024-06-24T16:53:16.898Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c6fffe24-7033-433e-8fd2-4efa4a35c29d", "secrets": "my-cluster-secrets-2"} 2024-06-24T16:53:16.898Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c6fffe24-7033-433e-8fd2-4efa4a35c29d", "user": "root"} 2024-06-24T16:53:16.941Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c6fffe24-7033-433e-8fd2-4efa4a35c29d", "user": "root"} 2024-06-24T16:53:17.058Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c6fffe24-7033-433e-8fd2-4efa4a35c29d", "secret": "some-name-mysql-init", "user": "root"} 2024-06-24T16:53:18.606Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "83890246-f436-40c7-b845-6be9689b6cd0", "error": "exec syncusers: command terminated with exit code 1 / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-11047.svc.cluster.local:3306) to ProxySQL\nRemoving existing user from ProxySQL: root\nAdding user to ProxySQL: root\n / ERROR 1045 (28000) at line 1: ProxySQL Admin Error: UNIQUE constraint failed: mysql_users.username, mysql_users.frontend\nERROR (line:715) : Failed to add the user (root) from PXC to ProxySQL database. 
\n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-11047.svc.cluster.local:3306) to ProxySQL\nRemoving existing user from ProxySQL: root\nAdding user to ProxySQL: root\n / ERROR 1045 (28000) at line 1: ProxySQL Admin Error: UNIQUE constraint failed: mysql_users.username, mysql_users.frontend\nERROR (line:715) : Failed to add the user (root) from PXC to ProxySQL database. \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-24T16:53:23.577Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c6fffe24-7033-433e-8fd2-4efa4a35c29d"} 2024-06-24T16:53:23.591Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c6fffe24-7033-433e-8fd2-4efa4a35c29d", "user": "root"} 2024-06-24T16:53:23.591Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c6fffe24-7033-433e-8fd2-4efa4a35c29d", "user": "operator"} 2024-06-24T16:53:23.620Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c6fffe24-7033-433e-8fd2-4efa4a35c29d", "user": "operator"} 2024-06-24T16:53:23.632Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c6fffe24-7033-433e-8fd2-4efa4a35c29d", "secret": "some-name-mysql-init", "user": "operator"} 2024-06-24T16:53:23.642Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c6fffe24-7033-433e-8fd2-4efa4a35c29d", "user": "operator"} 2024-06-24T16:53:23.642Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c6fffe24-7033-433e-8fd2-4efa4a35c29d", "user": "monitor"} 2024-06-24T16:53:23.672Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c6fffe24-7033-433e-8fd2-4efa4a35c29d", "user": "monitor"} 2024-06-24T16:53:23.686Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c6fffe24-7033-433e-8fd2-4efa4a35c29d", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-24T16:53:23.737Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c6fffe24-7033-433e-8fd2-4efa4a35c29d", "user": "monitor"} 2024-06-24T16:53:23.757Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c6fffe24-7033-433e-8fd2-4efa4a35c29d", "user": "monitor"} 2024-06-24T16:53:23.757Z INFO Password changed, 
updating user {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c6fffe24-7033-433e-8fd2-4efa4a35c29d", "user": "xtrabackup"} 2024-06-24T16:53:23.783Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c6fffe24-7033-433e-8fd2-4efa4a35c29d", "user": "xtrabackup"} 2024-06-24T16:53:23.793Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c6fffe24-7033-433e-8fd2-4efa4a35c29d", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-24T16:53:23.804Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c6fffe24-7033-433e-8fd2-4efa4a35c29d", "user": "xtrabackup"} 2024-06-24T16:53:23.804Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c6fffe24-7033-433e-8fd2-4efa4a35c29d", "user": "replication"} 2024-06-24T16:53:23.833Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c6fffe24-7033-433e-8fd2-4efa4a35c29d", "user": "replication"} 2024-06-24T16:53:23.842Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c6fffe24-7033-433e-8fd2-4efa4a35c29d", "secret": "some-name-mysql-init", "user": "replication"} 2024-06-24T16:53:23.857Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c6fffe24-7033-433e-8fd2-4efa4a35c29d", "user": "replication"} 2024-06-24T16:53:23.857Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c6fffe24-7033-433e-8fd2-4efa4a35c29d", "user": "proxyadmin"} 2024-06-24T16:53:23.906Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c6fffe24-7033-433e-8fd2-4efa4a35c29d", "user": "proxyadmin"} 2024-06-24T16:53:23.917Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c6fffe24-7033-433e-8fd2-4efa4a35c29d", "user": "proxyadmin"} 2024-06-24T16:53:23.917Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c6fffe24-7033-433e-8fd2-4efa4a35c29d", "last-applied-secret": "818257632e401834260d373e2fe5c1243ca70a75fe28d7f5d13ba99e054fb2f1"} 2024-06-24T16:53:23.917Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c6fffe24-7033-433e-8fd2-4efa4a35c29d", "last-applied-secret": "818257632e401834260d373e2fe5c1243ca70a75fe28d7f5d13ba99e054fb2f1"} 2024-06-24T16:53:24.162Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "c6fffe24-7033-433e-8fd2-4efa4a35c29d", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-24T16:55:05.396Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "f738edb1-3569-4f83-ac66-8c03f8581908", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.181.225.6:33062: connect: connection refused"} 2024-06-24T16:55:10.612Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "0413154d-d7ce-4141-86c4-336424ca8e2c", "primary name": "some-name-pxc-0.some-name-pxc.users-11047.svc.cluster.local"} 2024-06-24T16:55:15.804Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "641e2282-5bdf-4d21-a899-591cc9bbe4eb", "primary name": "some-name-pxc-0.some-name-pxc.users-11047.svc.cluster.local"} 2024-06-24T16:55:20.998Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "cec274b5-73a4-4e9b-88e2-92490da99670", "primary name": "some-name-pxc-0.some-name-pxc.users-11047.svc.cluster.local"} 2024-06-24T16:55:26.294Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "f453e73a-a373-4dc6-b39e-e56d53e27017", "primary name": "some-name-pxc-0.some-name-pxc.users-11047.svc.cluster.local"} 2024-06-24T16:55:31.535Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "0cbc52a1-2f49-4494-99fd-75ea7706da7e", "primary name": "some-name-pxc-0.some-name-pxc.users-11047.svc.cluster.local"} 2024-06-24T16:55:36.741Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "9508e0cc-f18a-4f65-892f-2d107bcf6dc3", "primary name": "some-name-pxc-0.some-name-pxc.users-11047.svc.cluster.local"} 2024-06-24T16:55:41.979Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "690ae590-a383-4017-aa50-f3945215d7b8", "primary name": "some-name-pxc-0.some-name-pxc.users-11047.svc.cluster.local"} 2024-06-24T16:55:51.629Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "54580007-78b0-4a08-b6fd-5f6365e5f0b7"} 2024-06-24T16:55:56.455Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "43bd1b70-5414-4eef-b0ea-d5ea23729e66"} 2024-06-24T16:56:02.017Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "19c3a391-0e55-46c6-9966-99712d30fec6"} 2024-06-24T16:56:03.846Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "a48d8c1e-b8da-496b-ae33-627cd190db6f", "user": "operator"} 2024-06-24T16:56:03.877Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "a48d8c1e-b8da-496b-ae33-627cd190db6f", "user": "operator"} 2024-06-24T16:56:03.908Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "a48d8c1e-b8da-496b-ae33-627cd190db6f", "secret": "some-name-mysql-init", "user": "operator"} 2024-06-24T16:56:03.956Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "a48d8c1e-b8da-496b-ae33-627cd190db6f", "user": "operator"} 2024-06-24T16:56:03.956Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "a48d8c1e-b8da-496b-ae33-627cd190db6f", "last-applied-secret": "20e3c82512fc4f4486c290fd639af49f838b697976b2a55e5925d470d14f9f0b"} 2024-06-24T16:56:05.529Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "a48d8c1e-b8da-496b-ae33-627cd190db6f", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-11047.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-11047.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-11047.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-11047.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-11047.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-11047.svc.' 
(using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-24T16:57:00.805Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "fca650cd-26cc-4dfb-b1f4-a812eb74cc47"} 2024-06-24T16:57:09.131Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "95aec780-2663-4bb5-ad3d-e598d0431e67"} 2024-06-24T16:57:14.436Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "26d817c1-53ee-47eb-af94-16c8bf588027"} 2024-06-24T16:57:19.715Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "03adaca3-f935-428c-be72-fd5288aefb14"} 2024-06-24T16:57:24.841Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "74c02faf-15ff-4b43-9a9b-6a25db3ca4d1"} 2024-06-24T16:57:30.119Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "cd5cc843-511b-4de0-a087-c39899034f80"} 2024-06-24T16:57:36.946Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "be37e1b1-c71c-4338-b147-14466ae3773e"} 2024-06-24T16:57:42.312Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "9af07774-0bc0-4113-8510-ae870769f27e"} 2024-06-24T16:57:46.622Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "64ee7716-684a-493b-9328-33954ad55443"} 2024-06-24T16:57:51.944Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "ef3b1e9e-92cb-4d8b-9f05-0a1cef6edd9d"} 2024-06-24T16:57:57.106Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "317e545d-d821-4167-af8e-5e0bc8a0cc3c"} 2024-06-24T16:58:02.464Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "07f119bf-bf73-4443-88fd-892f4766a2aa"} 2024-06-24T16:58:07.808Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "bd586f6d-b9bd-4d40-8e8d-edc0c21676b0"} 2024-06-24T16:58:13.145Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "98da24dc-5516-4fc9-8263-84d17c6f7f4b"} 2024-06-24T16:58:18.250Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": 
"0ccf7b0d-5322-486f-ba22-d84bc60dab92"} 2024-06-24T16:58:23.714Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "dfae57aa-2b19-428a-a74b-fd19794c32b4"} 2024-06-24T16:58:25.488Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "1c6554ad-bb93-4774-a27d-acba6270530c", "user": "root"} 2024-06-24T16:58:25.539Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "1c6554ad-bb93-4774-a27d-acba6270530c", "user": "root"} 2024-06-24T16:58:25.549Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "1c6554ad-bb93-4774-a27d-acba6270530c", "secret": "some-name-mysql-init", "user": "root"} 2024-06-24T16:58:31.064Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "1c6554ad-bb93-4774-a27d-acba6270530c"} 2024-06-24T16:58:31.081Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "1c6554ad-bb93-4774-a27d-acba6270530c", "user": "root"} 2024-06-24T16:58:31.081Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "1c6554ad-bb93-4774-a27d-acba6270530c", "user": "monitor"} 2024-06-24T16:58:31.110Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "1c6554ad-bb93-4774-a27d-acba6270530c", "user": "monitor"} 2024-06-24T16:58:31.127Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "1c6554ad-bb93-4774-a27d-acba6270530c", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-24T16:58:31.175Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "1c6554ad-bb93-4774-a27d-acba6270530c", "user": "monitor"} 2024-06-24T16:58:31.189Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "1c6554ad-bb93-4774-a27d-acba6270530c", "user": "monitor"} 2024-06-24T16:58:31.189Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "1c6554ad-bb93-4774-a27d-acba6270530c", "user": "xtrabackup"} 2024-06-24T16:58:31.213Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "1c6554ad-bb93-4774-a27d-acba6270530c", "user": "xtrabackup"} 2024-06-24T16:58:31.223Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "1c6554ad-bb93-4774-a27d-acba6270530c", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-24T16:58:31.246Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "1c6554ad-bb93-4774-a27d-acba6270530c", "user": "xtrabackup"} 2024-06-24T16:58:31.246Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "1c6554ad-bb93-4774-a27d-acba6270530c", "user": "proxyadmin"} 2024-06-24T16:58:31.297Z INFO Proxy user 
updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "1c6554ad-bb93-4774-a27d-acba6270530c", "user": "proxyadmin"} 2024-06-24T16:58:31.307Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "1c6554ad-bb93-4774-a27d-acba6270530c", "user": "proxyadmin"} 2024-06-24T16:58:31.307Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "1c6554ad-bb93-4774-a27d-acba6270530c", "last-applied-secret": "37ed40b8e68ce9692edfcf2b61d24bbef79cf95478cbc407b9deb0475c780bbb"} 2024-06-24T16:58:31.307Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "1c6554ad-bb93-4774-a27d-acba6270530c", "last-applied-secret": "37ed40b8e68ce9692edfcf2b61d24bbef79cf95478cbc407b9deb0475c780bbb"} 2024-06-24T16:58:31.529Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "1c6554ad-bb93-4774-a27d-acba6270530c", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-24T16:58:44.102Z INFO KubeAPIWarningLogger .metadata.ownerReferences contains duplicate entries; API server dedups owner references in 1.20+, and may reject such requests as early as 1.24; please fix your requests; duplicate UID(s) observed: 42c9e8b3-59a4-449e-bb92-358f52704d5b 2024-06-24T17:00:50.063Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "2a201671-fab4-45e8-b599-fe4d9ea57a85", "user": "root"} 2024-06-24T17:00:50.100Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "2a201671-fab4-45e8-b599-fe4d9ea57a85", "user": "root"} 2024-06-24T17:00:50.118Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "2a201671-fab4-45e8-b599-fe4d9ea57a85", "secret": "some-name-mysql-init", "user": "root"} 2024-06-24T17:00:50.143Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "2a201671-fab4-45e8-b599-fe4d9ea57a85", "user": "root"} 2024-06-24T17:00:50.143Z INFO Password 
2024-06-24T17:00:50.063Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "2a201671-fab4-45e8-b599-fe4d9ea57a85", "user": "root"}
2024-06-24T17:00:50.100Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "2a201671-fab4-45e8-b599-fe4d9ea57a85", "user": "root"}
2024-06-24T17:00:50.118Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "2a201671-fab4-45e8-b599-fe4d9ea57a85", "secret": "some-name-mysql-init", "user": "root"}
2024-06-24T17:00:50.143Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "2a201671-fab4-45e8-b599-fe4d9ea57a85", "user": "root"}
2024-06-24T17:00:50.143Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "2a201671-fab4-45e8-b599-fe4d9ea57a85", "user": "operator"}
2024-06-24T17:00:50.166Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "2a201671-fab4-45e8-b599-fe4d9ea57a85", "user": "operator"}
2024-06-24T17:00:50.176Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "2a201671-fab4-45e8-b599-fe4d9ea57a85", "secret": "some-name-mysql-init", "user": "operator"}
2024-06-24T17:00:50.186Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "2a201671-fab4-45e8-b599-fe4d9ea57a85", "user": "operator"}
2024-06-24T17:00:50.186Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "2a201671-fab4-45e8-b599-fe4d9ea57a85", "user": "monitor"}
2024-06-24T17:00:50.212Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "2a201671-fab4-45e8-b599-fe4d9ea57a85", "user": "monitor"}
2024-06-24T17:00:50.221Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "2a201671-fab4-45e8-b599-fe4d9ea57a85", "secret": "some-name-mysql-init", "user": "monitor"}
2024-06-24T17:00:50.232Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "2a201671-fab4-45e8-b599-fe4d9ea57a85", "user": "monitor"}
2024-06-24T17:00:50.232Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "2a201671-fab4-45e8-b599-fe4d9ea57a85", "user": "xtrabackup"}
2024-06-24T17:00:50.257Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "2a201671-fab4-45e8-b599-fe4d9ea57a85", "user": "xtrabackup"}
2024-06-24T17:00:50.265Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "2a201671-fab4-45e8-b599-fe4d9ea57a85", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2024-06-24T17:00:50.276Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "2a201671-fab4-45e8-b599-fe4d9ea57a85", "user": "xtrabackup"}
2024-06-24T17:00:50.276Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "2a201671-fab4-45e8-b599-fe4d9ea57a85", "user": "replication"}
2024-06-24T17:00:50.302Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "2a201671-fab4-45e8-b599-fe4d9ea57a85", "user": "replication"}
2024-06-24T17:00:50.313Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "2a201671-fab4-45e8-b599-fe4d9ea57a85", "secret": "some-name-mysql-init", "user": "replication"}
2024-06-24T17:00:50.324Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "2a201671-fab4-45e8-b599-fe4d9ea57a85", "user": "replication"}
2024-06-24T17:00:50.324Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "2a201671-fab4-45e8-b599-fe4d9ea57a85", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"}
2024-06-24T17:00:50.324Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "2a201671-fab4-45e8-b599-fe4d9ea57a85", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"}
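Editor's note: this single reconcile (2a201671-...) rotates root, operator, monitor, xtrabackup and replication together, and both restart messages carry the same last-applied-secret hash; the operator stamps that hash on the pod templates so a changed secret rolls each StatefulSet exactly once. A sketch for inspecting it, assuming the annotation key is percona.com/last-applied-secret (an implementation detail that may differ by operator version):

# Compare the hash on each pod template with the value in the log; if they
# match, no further restart is pending for that StatefulSet.
for sts in some-name-pxc some-name-proxysql; do
    kubectl -n users-11047 get sts "$sts" \
        -o jsonpath='{.spec.template.metadata.annotations.percona\.com/last-applied-secret}{"\n"}'
done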
2024-06-24T17:03:38.857Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "9d7191ca-21c9-44c5-b2cf-253938e9503e", "user": "monitor"}
2024-06-24T17:03:38.886Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "9d7191ca-21c9-44c5-b2cf-253938e9503e", "user": "monitor"}
2024-06-24T17:03:38.900Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "9d7191ca-21c9-44c5-b2cf-253938e9503e", "secret": "some-name-mysql-init", "user": "monitor"}
2024-06-24T17:03:38.911Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "9d7191ca-21c9-44c5-b2cf-253938e9503e", "user": "monitor"}
2024-06-24T17:03:38.911Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-11047", "name": "some-name", "reconcileID": "9d7191ca-21c9-44c5-b2cf-253938e9503e", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"}
github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1
/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:222
/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:261
/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:324
/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1248
[mysql] 2024/06/24 17:00:29 connection.go:49: read tcp 10.181.226.43:49958->10.123.254.165:3306: i/o timeout
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).processNextWorkItem
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).reconcileHandler
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Start.func2.2
+ grep -v NAMESPACE
+ kubectl get pxc --all-namespaces -o wide
+ xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ kubectl patch pxc -n users-11047 some-name --type=merge -p '{"metadata":{"finalizers":[]}}'
perconaxtradbcluster.pxc.percona.com/some-name patched
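Editor's note: the lines between the last log entry and the shell trace are frames of a goroutine stack trace (plus one go-sql-driver timeout message) that the log collector reordered; they are kept as collected since the original frame order cannot be recovered. The get/xargs/patch pipeline that follows clears finalizers on every PXC resource before deletion, so the upcoming "kubectl delete pxc --all" cannot hang on a finalizer that no operator will ever service. The same idea written as an explicit loop:

# Equivalent to the xargs pipeline in the trace, long-hand.
kubectl get pxc --all-namespaces \
    -o jsonpath='{range .items[*]}{.metadata.namespace} {.metadata.name}{"\n"}{end}' |
while read -r ns name; do
    kubectl patch pxc -n "$ns" "$name" --type=merge \
        -p '{"metadata":{"finalizers":[]}}'
done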
+ kubectl_bin delete pxc --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.p2DzO7aK6C
++ mktemp
+ local LAST_ERR=/tmp/tmp.QpyYmeU4LZ
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.p2DzO7aK6C
perconaxtradbcluster.pxc.percona.com "some-name" deleted
+ cat /tmp/tmp.QpyYmeU4LZ
+ rm /tmp/tmp.p2DzO7aK6C /tmp/tmp.QpyYmeU4LZ
+ return 0
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.8Cq1TroVho
++ mktemp
+ local LAST_ERR=/tmp/tmp.dzxTFc02os
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.8Cq1TroVho
No resources found
+ cat /tmp/tmp.dzxTFc02os
+ rm /tmp/tmp.8Cq1TroVho /tmp/tmp.dzxTFc02os
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.V8vjr6Uv46
++ mktemp
+ local LAST_ERR=/tmp/tmp.5Qmxuj4UAA
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.V8vjr6Uv46
No resources found
+ cat /tmp/tmp.5Qmxuj4UAA
+ rm /tmp/tmp.V8vjr6Uv46 /tmp/tmp.5Qmxuj4UAA
+ return 0
+ kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
++ mktemp
+ local LAST_OUT=/tmp/tmp.107IFTvToz
++ mktemp
+ local LAST_ERR=/tmp/tmp.hiNsMOtclE
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.107IFTvToz
validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted
+ cat /tmp/tmp.hiNsMOtclE
+ rm /tmp/tmp.107IFTvToz /tmp/tmp.hiNsMOtclE
+ return 0
+ kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml
+ :
+ '[' '!' -z '' ']'
+ '[' -n pxc-operator ']'
+ kubectl_bin delete --grace-period=0 --force=true namespace users-11047
+ rm -rf /tmp/tmp.k32Y9mXGJH
+ kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator
++ mktemp
++ mktemp
+ local LAST_OUT=/tmp/tmp.dFc6iANfBx
+ desc 'test passed'
+ set +o xtrace
-----------------------------------------------------------------------------------
test passed
-----------------------------------------------------------------------------------
++ mktemp
+ local LAST_OUT=/tmp/tmp.njfAGpQGf9
++ mktemp
+ local LAST_ERR=/tmp/tmp.ruGLHZ2BkO
+ local exit_status=0
+ local LAST_ERR=/tmp/tmp.0m7M4Sgsu9
++ seq 0 2
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace pxc-operator
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace users-11047
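Editor's note: every cleanup call above runs through the harness's kubectl_bin wrapper, whose behavior can be read straight off the trace: two mktemp files for stdout/stderr, up to three attempts (seq 0 2), a break as soon as the exit status is 0, then both streams are printed and the temp files removed. The two namespace deletions at the end interleave because they run concurrently. A reconstruction of the wrapper from the trace alone; the real helper in the repository's e2e-tests functions may differ in details such as sleeps between retries:

# kubectl_bin as implied by the trace (reconstruction, not the original).
kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0 i
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        # matches the trace's '[' 0 '!=' 0 ']' followed by break:
        # stop retrying on the first success
        [ "$exit_status" != 0 ] || break
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR" >&2
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}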