Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/logs/users-8-0.log WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 + create_infra users-8381 + local ns=users-8381 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-15683 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.qeucsxISy9 ++ mktemp + local LAST_ERR=/tmp/tmp.UK14fMWTFl + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.qeucsxISy9 perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.UK14fMWTFl + rm /tmp/tmp.qeucsxISy9 /tmp/tmp.UK14fMWTFl + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.50bAJNIdMq ++ mktemp + local LAST_ERR=/tmp/tmp.CuQU0R329j + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.50bAJNIdMq No resources found + cat /tmp/tmp.CuQU0R329j + rm /tmp/tmp.50bAJNIdMq /tmp/tmp.CuQU0R329j + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.56OzUX5uj1 ++ mktemp + local LAST_ERR=/tmp/tmp.tPq97TI8cH + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.56OzUX5uj1 No resources found + cat /tmp/tmp.tPq97TI8cH + rm /tmp/tmp.56OzUX5uj1 /tmp/tmp.tPq97TI8cH + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + 
: + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.YkzOIl8Dtz + local LAST_OUT=/tmp/tmp.rkHCL2XP2N ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.ugMi93Fd24 + local exit_status=0 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.tSqcMxJ9mF + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.rkHCL2XP2N + cat /tmp/tmp.ugMi93Fd24 + rm /tmp/tmp.rkHCL2XP2N /tmp/tmp.ugMi93Fd24 + return 0 namespace "users-15683" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.YkzOIl8Dtz namespace "pxc-operator" deleted + cat /tmp/tmp.tSqcMxJ9mF + rm /tmp/tmp.YkzOIl8Dtz /tmp/tmp.tSqcMxJ9mF + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'namespace/pxc-operator - ' namespace/pxc-operator - + set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.1enJDRXGd5 ++ mktemp + local LAST_ERR=/tmp/tmp.4E15mUHXVV + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1enJDRXGd5 namespace/pxc-operator created + cat /tmp/tmp.4E15mUHXVV + rm /tmp/tmp.1enJDRXGd5 /tmp/tmp.4E15mUHXVV + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.mSopiKQpJF +++ mktemp ++ local LAST_ERR=/tmp/tmp.NrOzYVs37v ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mSopiKQpJF ++ cat /tmp/tmp.NrOzYVs37v ++ rm /tmp/tmp.mSopiKQpJF /tmp/tmp.NrOzYVs37v ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1724-95c26a2c-3-cluster6 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.cVTaPfDTGX ++ mktemp + local LAST_ERR=/tmp/tmp.Qs1IAkFnfg + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1724-95c26a2c-3-cluster6 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.cVTaPfDTGX Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1724-95c26a2c-3-cluster6" modified. 
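Note on reading this trace: nearly every kubectl call is expanded through the harness's kubectl_bin retry wrapper, which is why each step repeats the same mktemp / seq 0 2 / cat / rm plumbing. A minimal sketch of what that wrapper appears to do, reconstructed from the trace (a hypothetical reimplementation, not the harness source):

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status
        LAST_OUT=$(mktemp); LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do                      # up to three attempts
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            [ "$exit_status" -eq 0 ] && break
            sleep 0                                  # the trace shows a zero-second pause between attempts
        done
        cat "$LAST_OUT"; cat "$LAST_ERR"
        rm "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }

With this in mind, a block of mktemp/cat/rm lines surrounding a command is normal retry plumbing, not a failure.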
+ cat /tmp/tmp.Qs1IAkFnfg + rm /tmp/tmp.cVTaPfDTGX /tmp/tmp.Qs1IAkFnfg + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.Zt5mKDivKl ++ mktemp + local LAST_ERR=/tmp/tmp.bqlzLor35S + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Zt5mKDivKl customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.bqlzLor35S + rm /tmp/tmp.Zt5mKDivKl /tmp/tmp.bqlzLor35S + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/deploy/cw-rbac.yaml + kubectl_bin apply -f - + sed -e 's^namespace: .*^namespace: pxc-operator^' ++ mktemp + local LAST_OUT=/tmp/tmp.oEGJp2gMXU ++ mktemp + local LAST_ERR=/tmp/tmp.upF4kOkCAi + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.oEGJp2gMXU clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.upF4kOkCAi + rm /tmp/tmp.oEGJp2gMXU /tmp/tmp.upF4kOkCAi + return 0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/deploy/cw-operator.yaml + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-1724-95c26a2c^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - + kubectl_bin apply -f - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - ++ mktemp + local LAST_OUT=/tmp/tmp.90nAZD1kwj ++ mktemp + local LAST_ERR=/tmp/tmp.rPxBJ1WgT6 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.90nAZD1kwj deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.rPxBJ1WgT6 + rm /tmp/tmp.90nAZD1kwj /tmp/tmp.rPxBJ1WgT6 + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.BRqjJirpq3 ++ mktemp + local LAST_ERR=/tmp/tmp.vAWvsBGtHx + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BRqjJirpq3 pod/percona-xtradb-cluster-operator-b98bcb965-c7ldl condition met + cat /tmp/tmp.vAWvsBGtHx + rm /tmp/tmp.BRqjJirpq3 /tmp/tmp.vAWvsBGtHx + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.gvGURMHy3B +++ mktemp ++ local LAST_ERR=/tmp/tmp.Q98tFcUCaO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gvGURMHy3B ++ cat /tmp/tmp.Q98tFcUCaO ++ rm /tmp/tmp.gvGURMHy3B /tmp/tmp.Q98tFcUCaO ++ return 0 + wait_pod percona-xtradb-cluster-operator-b98bcb965-c7ldl 480 pxc-operator + local pod=percona-xtradb-cluster-operator-b98bcb965-c7ldl + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-b98bcb965-c7ldl ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-b98bcb965-c7ldl condition met percona-xtradb-cluster-operator-b98bcb965-c7ldl.Ok + sleep 3 + create_namespace users-8381 + local namespace=users-8381 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up 
old namespaces users-8381' ++ mktemp + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-8381 ----------------------------------------------------------------------------------- + xargs kubectl delete ns + kubectl_bin delete namespace users-8381 + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + local LAST_OUT=/tmp/tmp.i1JTvDjAxn ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.sYy0zuHYi0 ++ mktemp + local LAST_ERR=/tmp/tmp.RNrxALLJC6 + local exit_status=0 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.CCXriokbTE + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-8381 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-8381 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.i1JTvDjAxn + cat /tmp/tmp.RNrxALLJC6 + rm /tmp/tmp.i1JTvDjAxn /tmp/tmp.RNrxALLJC6 + return 0 Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-8381 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.sYy0zuHYi0 + cat /tmp/tmp.CCXriokbTE Error from server (NotFound): namespaces "users-8381" not found + rm /tmp/tmp.sYy0zuHYi0 /tmp/tmp.CCXriokbTE + return 1 + : + wait_for_delete namespace/users-8381 + local res=namespace/users-8381 + echo -n 'namespace/users-8381 - ' namespace/users-8381 - + set +o xtrace Error from server (NotFound): namespaces "users-8381" not found + desc 'create namespace users-8381' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-8381 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-8381 ++ mktemp + local LAST_OUT=/tmp/tmp.eVH8FrOYf3 ++ mktemp + local LAST_ERR=/tmp/tmp.HSTLKXsGw5 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-8381 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.eVH8FrOYf3 namespace/users-8381 created + cat /tmp/tmp.HSTLKXsGw5 + rm /tmp/tmp.eVH8FrOYf3 /tmp/tmp.HSTLKXsGw5 + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.Gmsrhzx92m +++ mktemp ++ local LAST_ERR=/tmp/tmp.OUKRcucYSv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Gmsrhzx92m ++ cat /tmp/tmp.OUKRcucYSv ++ rm /tmp/tmp.Gmsrhzx92m /tmp/tmp.OUKRcucYSv ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1724-95c26a2c-3-cluster6 --namespace=users-8381 ++ mktemp + local LAST_OUT=/tmp/tmp.nZkCfr5Sf5 ++ mktemp + local LAST_ERR=/tmp/tmp.SMC5bKZ0mO + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1724-95c26a2c-3-cluster6 --namespace=users-8381 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.nZkCfr5Sf5 Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1724-95c26a2c-3-cluster6" modified. 
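The namespace cleanup that runs before each phase is the same pipeline both times it appears above: list namespaces, filter out system and protected ones, and hand the rest to kubectl delete ns. Roughly, as the interleaved trace suggests:

    kubectl get ns \
        | egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' \
        | awk '{print $1}' \
        | xargs kubectl delete ns

Errors that do appear in the output here (Forbidden for the "default" namespace, NotFound for namespaces already gone) are tolerated by the harness rather than failing the run; the '+ return 1 + :' sequence shows the non-zero exit being discarded.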
+ cat /tmp/tmp.SMC5bKZ0mO + rm /tmp/tmp.nZkCfr5Sf5 /tmp/tmp.SMC5bKZ0mO + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.wFtvqzc4r7 ++ mktemp + local LAST_ERR=/tmp/tmp.gg2ToBlzKP + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.wFtvqzc4r7 secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.gg2ToBlzKP + rm /tmp/tmp.wFtvqzc4r7 /tmp/tmp.gg2ToBlzKP + return 0 + desc 'create PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.ElNVCuJSvt ++ mktemp + local LAST_ERR=/tmp/tmp.nwgNTczrQA + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ElNVCuJSvt secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.nwgNTczrQA + rm /tmp/tmp.ElNVCuJSvt /tmp/tmp.nwgNTczrQA + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/conf/client.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1724-95c26a2c#' + local LAST_OUT=/tmp/tmp.eXbC0CnoTN + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 
's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-8381~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' ++ mktemp + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + local LAST_ERR=/tmp/tmp.3j8tcR2sLD + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.eXbC0CnoTN deployment.apps/pxc-client created + cat /tmp/tmp.3j8tcR2sLD + rm /tmp/tmp.eXbC0CnoTN /tmp/tmp.3j8tcR2sLD + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/conf/some-name.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/conf/some-name.yml + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_OUT=/tmp/tmp.kpFL9Cp0Gh + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' ++ mktemp + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1724-95c26a2c#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-8381~ + local LAST_ERR=/tmp/tmp.8GXLeDbWuP + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.kpFL9Cp0Gh perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.8GXLeDbWuP + rm /tmp/tmp.kpFL9Cp0Gh /tmp/tmp.8GXLeDbWuP + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.4QhcQPcmc6 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.UkSMQuLeI6 +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ 
set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.4QhcQPcmc6 +++ cat /tmp/tmp.UkSMQuLeI6 +++ rm /tmp/tmp.4QhcQPcmc6 /tmp/tmp.UkSMQuLeI6 +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.OwzhUzXo0j ++++ mktemp +++ local LAST_ERR=/tmp/tmp.OC437O3C6w +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.OwzhUzXo0j +++ cat /tmp/tmp.OC437O3C6w +++ rm /tmp/tmp.OwzhUzXo0j /tmp/tmp.OC437O3C6w +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-8381 ++ mktemp + local LAST_OUT=/tmp/tmp.6FdDULijot ++ mktemp + local LAST_ERR=/tmp/tmp.rZ7oDBJTTS + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-8381 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-8381 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-8381 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.6FdDULijot + cat /tmp/tmp.rZ7oDBJTTS error: no matching resources found + rm /tmp/tmp.6FdDULijot /tmp/tmp.rZ7oDBJTTS + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met some-name-proxysql-0.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met some-name-pxc-0.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 
+ local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met some-name-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met some-name-pxc-2.Ok + sleep 10 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8SbzqR8quh +++ mktemp ++ local LAST_ERR=/tmp/tmp.AEh5MM5gJH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8SbzqR8quh ++ cat /tmp/tmp.AEh5MM5gJH ++ rm /tmp/tmp.8SbzqR8quh /tmp/tmp.AEh5MM5gJH ++ return 0 + client_pod=pxc-client-6644d8898f-lfgck + wait_pod pxc-client-6644d8898f-lfgck + local pod=pxc-client-6644d8898f-lfgck + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-lfgck ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-lfgck condition met pxc-client-6644d8898f-lfgck.Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5FYfKE2ASV +++ mktemp ++ local LAST_ERR=/tmp/tmp.5mdxMVDa0Z ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5FYfKE2ASV ++ cat /tmp/tmp.5mdxMVDa0Z ++ rm /tmp/tmp.5FYfKE2ASV /tmp/tmp.5mdxMVDa0Z ++ return 0 + client_pod=pxc-client-6644d8898f-lfgck + wait_pod pxc-client-6644d8898f-lfgck + local pod=pxc-client-6644d8898f-lfgck + local max_retry=480 + local ns= ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo pxc-client-6644d8898f-lfgck ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-lfgck condition met pxc-client-6644d8898f-lfgck.Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h 
some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rCeZhZTpve +++ mktemp ++ local LAST_ERR=/tmp/tmp.OZq8tzSjaL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rCeZhZTpve ++ cat /tmp/tmp.OZq8tzSjaL ++ rm /tmp/tmp.rCeZhZTpve /tmp/tmp.OZq8tzSjaL ++ return 0 + client_pod=pxc-client-6644d8898f-lfgck + wait_pod pxc-client-6644d8898f-lfgck + local pod=pxc-client-6644d8898f-lfgck + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-lfgck ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-lfgck condition met pxc-client-6644d8898f-lfgck.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.BIh0IyeRrW/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-1.sql /tmp/tmp.BIh0IyeRrW/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3y12SnPf6D +++ mktemp ++ local LAST_ERR=/tmp/tmp.T8sv3rLCKW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3y12SnPf6D ++ cat /tmp/tmp.T8sv3rLCKW ++ rm /tmp/tmp.3y12SnPf6D /tmp/tmp.T8sv3rLCKW ++ return 0 + client_pod=pxc-client-6644d8898f-lfgck + wait_pod pxc-client-6644d8898f-lfgck + local pod=pxc-client-6644d8898f-lfgck + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-lfgck ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-lfgck condition met pxc-client-6644d8898f-lfgck.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.BIh0IyeRrW/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-1.sql /tmp/tmp.BIh0IyeRrW/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MzvT2KEFT3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.dWNezeoOBe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MzvT2KEFT3 ++ cat /tmp/tmp.dWNezeoOBe ++ rm /tmp/tmp.MzvT2KEFT3 /tmp/tmp.dWNezeoOBe ++ return 0 + client_pod=pxc-client-6644d8898f-lfgck + wait_pod pxc-client-6644d8898f-lfgck + local pod=pxc-client-6644d8898f-lfgck + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-lfgck ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-lfgck condition met pxc-client-6644d8898f-lfgck.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.BIh0IyeRrW/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-1.sql /tmp/tmp.BIh0IyeRrW/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fDoXBYkEs1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.zh0d0bthFZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fDoXBYkEs1 ++ cat /tmp/tmp.zh0d0bthFZ Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.fDoXBYkEs1 /tmp/tmp.zh0d0bthFZ ++ return 0 + '[' '' ']' + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.9Zj9dei5vd ++ mktemp + local LAST_ERR=/tmp/tmp.xpNdB4SE1j + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.9Zj9dei5vd secret/my-cluster-secrets patched + cat /tmp/tmp.xpNdB4SE1j + rm /tmp/tmp.9Zj9dei5vd /tmp/tmp.xpNdB4SE1j + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sAxXo2Derr +++ mktemp ++ local LAST_ERR=/tmp/tmp.BiVEJd1WT8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sAxXo2Derr ++ cat /tmp/tmp.BiVEJd1WT8 ++ rm /tmp/tmp.sAxXo2Derr /tmp/tmp.BiVEJd1WT8 ++ return 0 + client_pod=pxc-client-6644d8898f-lfgck + wait_pod pxc-client-6644d8898f-lfgck + local pod=pxc-client-6644d8898f-lfgck + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-lfgck ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-lfgck condition met pxc-client-6644d8898f-lfgck.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.BIh0IyeRrW/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.BIh0IyeRrW/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.NEjEqgK1KZ ++ mktemp + local LAST_ERR=/tmp/tmp.FPqVvluD3M + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.NEjEqgK1KZ perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.FPqVvluD3M + rm /tmp/tmp.NEjEqgK1KZ /tmp/tmp.FPqVvluD3M + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.m3eFJ2iwcG +++ mktemp ++ local LAST_ERR=/tmp/tmp.B37kZnX1xq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.m3eFJ2iwcG ++ cat /tmp/tmp.B37kZnX1xq ++ rm /tmp/tmp.m3eFJ2iwcG /tmp/tmp.B37kZnX1xq ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PbFeXDPDSE +++ mktemp ++ local LAST_ERR=/tmp/tmp.qIh5nKBkpe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PbFeXDPDSE ++ cat /tmp/tmp.qIh5nKBkpe ++ rm /tmp/tmp.PbFeXDPDSE /tmp/tmp.qIh5nKBkpe ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.BVtLPU19ai ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ZFwzi0OSvh +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.BVtLPU19ai +++++ cat /tmp/tmp.ZFwzi0OSvh +++++ rm /tmp/tmp.BVtLPU19ai /tmp/tmp.ZFwzi0OSvh +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.mucWPZSrnC ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Q5IS5919Zm +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.mucWPZSrnC +++++ cat /tmp/tmp.Q5IS5919Zm +++++ rm /tmp/tmp.mucWPZSrnC /tmp/tmp.Q5IS5919Zm +++++ return 0 ++++ [[ true == 
\t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vx9Q3Lzmpm +++ mktemp ++ local LAST_ERR=/tmp/tmp.cqrhj8pZDB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vx9Q3Lzmpm ++ cat /tmp/tmp.cqrhj8pZDB ++ rm /tmp/tmp.vx9Q3Lzmpm /tmp/tmp.cqrhj8pZDB ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.mUB3GXbtB6 ++ mktemp + local LAST_ERR=/tmp/tmp.1ahLt33a1k + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.mUB3GXbtB6 secret/my-cluster-secrets patched + cat /tmp/tmp.1ahLt33a1k + rm /tmp/tmp.mUB3GXbtB6 /tmp/tmp.1ahLt33a1k + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7fO7Wan1ZN +++ mktemp ++ local LAST_ERR=/tmp/tmp.O1PXoBakza ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7fO7Wan1ZN ++ cat /tmp/tmp.O1PXoBakza ++ rm /tmp/tmp.7fO7Wan1ZN /tmp/tmp.O1PXoBakza ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.s7rjZ8X5cY +++ mktemp ++ local LAST_ERR=/tmp/tmp.LJV5Ojf62H ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.s7rjZ8X5cY ++ cat /tmp/tmp.LJV5Ojf62H ++ rm /tmp/tmp.s7rjZ8X5cY /tmp/tmp.LJV5Ojf62H ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Z7nNuWqeA4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.NbCZRgQ3ka ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Z7nNuWqeA4 ++ cat /tmp/tmp.NbCZRgQ3ka ++ rm /tmp/tmp.Z7nNuWqeA4 /tmp/tmp.NbCZRgQ3ka ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local 
LAST_OUT=/tmp/tmp.8KRINrFr5E ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.eFYBWklJCy +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.8KRINrFr5E +++++ cat /tmp/tmp.eFYBWklJCy +++++ rm /tmp/tmp.8KRINrFr5E /tmp/tmp.eFYBWklJCy +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.MHpU3JHwQU ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.3Tq49TRG7U +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.MHpU3JHwQU +++++ cat /tmp/tmp.3Tq49TRG7U +++++ rm /tmp/tmp.MHpU3JHwQU /tmp/tmp.3Tq49TRG7U +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wkFnCH4DPr +++ mktemp ++ local LAST_ERR=/tmp/tmp.YBVV4Yyhdw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wkFnCH4DPr ++ cat /tmp/tmp.YBVV4Yyhdw ++ rm /tmp/tmp.wkFnCH4DPr /tmp/tmp.YBVV4Yyhdw ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.BIh0IyeRrW/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-2.sql /tmp/tmp.BIh0IyeRrW/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.BIh0IyeRrW/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-2.sql /tmp/tmp.BIh0IyeRrW/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.BIh0IyeRrW/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-2.sql /tmp/tmp.BIh0IyeRrW/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.B1T07RX09w ++ mktemp + local LAST_ERR=/tmp/tmp.0t6xLMgqQc + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.B1T07RX09w perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.0t6xLMgqQc + rm /tmp/tmp.B1T07RX09w /tmp/tmp.0t6xLMgqQc + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.NWRp4rrN59 ++ mktemp + local LAST_ERR=/tmp/tmp.Z02dAAAgFG + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.NWRp4rrN59 secret/my-cluster-secrets patched + cat /tmp/tmp.Z02dAAAgFG + rm /tmp/tmp.NWRp4rrN59 /tmp/tmp.Z02dAAAgFG + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CUK9MoQkEN +++ mktemp ++ local LAST_ERR=/tmp/tmp.vMpwRfxbDB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CUK9MoQkEN ++ cat /tmp/tmp.vMpwRfxbDB ++ rm /tmp/tmp.CUK9MoQkEN /tmp/tmp.vMpwRfxbDB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FuAoMs94OK +++ mktemp ++ local LAST_ERR=/tmp/tmp.Avu5M9PEkB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FuAoMs94OK ++ cat /tmp/tmp.Avu5M9PEkB ++ rm /tmp/tmp.FuAoMs94OK /tmp/tmp.Avu5M9PEkB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Kn3dzH9JPD +++ mktemp ++ local LAST_ERR=/tmp/tmp.xlQm2ASJmE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 
'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Kn3dzH9JPD ++ cat /tmp/tmp.xlQm2ASJmE ++ rm /tmp/tmp.Kn3dzH9JPD /tmp/tmp.xlQm2ASJmE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WZFM95Sz6G +++ mktemp ++ local LAST_ERR=/tmp/tmp.HaQdHqFc8T ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WZFM95Sz6G ++ cat /tmp/tmp.HaQdHqFc8T ++ rm /tmp/tmp.WZFM95Sz6G /tmp/tmp.HaQdHqFc8T ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vrJwIJKlAQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.HDJWNbqP4c ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vrJwIJKlAQ ++ cat /tmp/tmp.HDJWNbqP4c ++ rm /tmp/tmp.vrJwIJKlAQ /tmp/tmp.HDJWNbqP4c ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FGRoitg8YO +++ mktemp ++ local LAST_ERR=/tmp/tmp.2cRaRypnUg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FGRoitg8YO ++ cat /tmp/tmp.2cRaRypnUg ++ rm /tmp/tmp.FGRoitg8YO /tmp/tmp.2cRaRypnUg ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XgdwYdnqdy +++ mktemp ++ local LAST_ERR=/tmp/tmp.GawMgC3phP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XgdwYdnqdy ++ cat /tmp/tmp.GawMgC3phP ++ rm /tmp/tmp.XgdwYdnqdy /tmp/tmp.GawMgC3phP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qm2ln0DwBj +++ mktemp ++ local LAST_ERR=/tmp/tmp.XuANrhf7Ee ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qm2ln0DwBj ++ cat /tmp/tmp.XuANrhf7Ee ++ rm /tmp/tmp.qm2ln0DwBj /tmp/tmp.XuANrhf7Ee ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NDGLSfhSqE +++ mktemp ++ local LAST_ERR=/tmp/tmp.ab4drkcgxQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e 
++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NDGLSfhSqE ++ cat /tmp/tmp.ab4drkcgxQ ++ rm /tmp/tmp.NDGLSfhSqE /tmp/tmp.ab4drkcgxQ ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.ekSTAevUlX ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.xZoQ188l8X +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.ekSTAevUlX +++++ cat /tmp/tmp.xZoQ188l8X +++++ rm /tmp/tmp.ekSTAevUlX /tmp/tmp.xZoQ188l8X +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.NtbocfIbxj ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.XKuaH54ssQ +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.NtbocfIbxj +++++ cat /tmp/tmp.XKuaH54ssQ +++++ rm /tmp/tmp.NtbocfIbxj /tmp/tmp.XKuaH54ssQ +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bdwEmO7NZn +++ mktemp ++ local LAST_ERR=/tmp/tmp.L67GIGTn6b ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bdwEmO7NZn ++ cat /tmp/tmp.L67GIGTn6b ++ rm /tmp/tmp.bdwEmO7NZn /tmp/tmp.L67GIGTn6b ++ return 0 + [[ 2 == \2 ]] + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-3-80.sql ']' + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' 
-s /tmp/tmp.BIh0IyeRrW/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-3.sql /tmp/tmp.BIh0IyeRrW/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.NOP2LECYLJ ++ mktemp + local LAST_ERR=/tmp/tmp.R0FWX2LYCz + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.NOP2LECYLJ secret/my-cluster-secrets patched + cat /tmp/tmp.R0FWX2LYCz + rm /tmp/tmp.NOP2LECYLJ /tmp/tmp.R0FWX2LYCz + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.7ksbzm8fUp +++ mktemp ++ local LAST_ERR=/tmp/tmp.725BXdNkfq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7ksbzm8fUp ++ cat /tmp/tmp.725BXdNkfq ++ rm /tmp/tmp.7ksbzm8fUp /tmp/tmp.725BXdNkfq ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + retry=0 + is_password_updated monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep additional_password + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0wDZobYEd6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.3XSuAcNdIT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0wDZobYEd6 ++ cat /tmp/tmp.3XSuAcNdIT ++ rm /tmp/tmp.0wDZobYEd6 /tmp/tmp.3XSuAcNdIT ++ return 0 + client_pod=pxc-client-6644d8898f-lfgck + wait_pod pxc-client-6644d8898f-lfgck + local pod=pxc-client-6644d8898f-lfgck + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-lfgck ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-lfgck condition met pxc-client-6644d8898f-lfgck.Ok + set +o xtrace {"additional_password": "*C1F414D9BAF378B656A849B31F9F8AF3125F558B"} + retry=0 + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc 
-uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TXx5tr62d6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Y2XPGiZG5H ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TXx5tr62d6 ++ cat /tmp/tmp.Y2XPGiZG5H ++ rm /tmp/tmp.TXx5tr62d6 /tmp/tmp.Y2XPGiZG5H ++ return 0 + client_pod=pxc-client-6644d8898f-lfgck + wait_pod pxc-client-6644d8898f-lfgck + local pod=pxc-client-6644d8898f-lfgck + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-lfgck ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-lfgck condition met pxc-client-6644d8898f-lfgck.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 1 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UsNQn2wJfu +++ mktemp ++ local LAST_ERR=/tmp/tmp.z0zRE3fabZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UsNQn2wJfu ++ cat /tmp/tmp.z0zRE3fabZ ++ rm /tmp/tmp.UsNQn2wJfu /tmp/tmp.z0zRE3fabZ ++ return 0 + client_pod=pxc-client-6644d8898f-lfgck + wait_pod pxc-client-6644d8898f-lfgck + local pod=pxc-client-6644d8898f-lfgck + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-lfgck ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-lfgck condition met pxc-client-6644d8898f-lfgck.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 2 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oznlC2q9ps +++ mktemp ++ local LAST_ERR=/tmp/tmp.dSW77JuW83 ++ local 
exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oznlC2q9ps ++ cat /tmp/tmp.dSW77JuW83 ++ rm /tmp/tmp.oznlC2q9ps /tmp/tmp.dSW77JuW83 ++ return 0 + client_pod=pxc-client-6644d8898f-lfgck + wait_pod pxc-client-6644d8898f-lfgck + local pod=pxc-client-6644d8898f-lfgck + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-lfgck ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-lfgck condition met pxc-client-6644d8898f-lfgck.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 3 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tpIzGTDvaU +++ mktemp ++ local LAST_ERR=/tmp/tmp.xhOpItX0hE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tpIzGTDvaU ++ cat /tmp/tmp.xhOpItX0hE ++ rm /tmp/tmp.tpIzGTDvaU /tmp/tmp.xhOpItX0hE ++ return 0 + client_pod=pxc-client-6644d8898f-lfgck + wait_pod pxc-client-6644d8898f-lfgck + local pod=pxc-client-6644d8898f-lfgck + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-lfgck ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-lfgck condition met pxc-client-6644d8898f-lfgck.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 4 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KavfVWRdrk +++ mktemp ++ local LAST_ERR=/tmp/tmp.pVWKgQzCCj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KavfVWRdrk ++ cat /tmp/tmp.pVWKgQzCCj ++ rm /tmp/tmp.KavfVWRdrk /tmp/tmp.pVWKgQzCCj ++ return 0 + client_pod=pxc-client-6644d8898f-lfgck + wait_pod pxc-client-6644d8898f-lfgck + local pod=pxc-client-6644d8898f-lfgck + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-lfgck ++ /usr/bin/sed -E 
's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-lfgck condition met pxc-client-6644d8898f-lfgck.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 5 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.14yvWsIfJE +++ mktemp ++ local LAST_ERR=/tmp/tmp.hxloW1gz8I ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.14yvWsIfJE ++ cat /tmp/tmp.hxloW1gz8I ++ rm /tmp/tmp.14yvWsIfJE /tmp/tmp.hxloW1gz8I ++ return 0 + client_pod=pxc-client-6644d8898f-lfgck + wait_pod pxc-client-6644d8898f-lfgck + local pod=pxc-client-6644d8898f-lfgck + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-lfgck ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-lfgck condition met pxc-client-6644d8898f-lfgck.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 6 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.J5XwLcImjU +++ mktemp ++ local LAST_ERR=/tmp/tmp.k9xWwnizPg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.J5XwLcImjU ++ cat /tmp/tmp.k9xWwnizPg ++ rm /tmp/tmp.J5XwLcImjU /tmp/tmp.k9xWwnizPg ++ return 0 + client_pod=pxc-client-6644d8898f-lfgck + wait_pod pxc-client-6644d8898f-lfgck + local pod=pxc-client-6644d8898f-lfgck + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-lfgck ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-lfgck condition met pxc-client-6644d8898f-lfgck.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 7 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE 
user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OZbkMOHAb1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.GrFk3gyrZ5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OZbkMOHAb1 ++ cat /tmp/tmp.GrFk3gyrZ5 ++ rm /tmp/tmp.OZbkMOHAb1 /tmp/tmp.GrFk3gyrZ5 ++ return 0 + client_pod=pxc-client-6644d8898f-lfgck + wait_pod pxc-client-6644d8898f-lfgck + local pod=pxc-client-6644d8898f-lfgck + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-lfgck ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-lfgck condition met pxc-client-6644d8898f-lfgck.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 8 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ck3sqtZSnU +++ mktemp ++ local LAST_ERR=/tmp/tmp.w8RHrYTXDX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Ck3sqtZSnU ++ cat /tmp/tmp.w8RHrYTXDX ++ rm /tmp/tmp.Ck3sqtZSnU /tmp/tmp.w8RHrYTXDX ++ return 0 + client_pod=pxc-client-6644d8898f-lfgck + wait_pod pxc-client-6644d8898f-lfgck + local pod=pxc-client-6644d8898f-lfgck + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-lfgck ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-lfgck condition met pxc-client-6644d8898f-lfgck.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 9 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YE4Fh22CWk +++ mktemp ++ local LAST_ERR=/tmp/tmp.T9sGcFNuBy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods 
--selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YE4Fh22CWk ++ cat /tmp/tmp.T9sGcFNuBy ++ rm /tmp/tmp.YE4Fh22CWk /tmp/tmp.T9sGcFNuBy ++ return 0 + client_pod=pxc-client-6644d8898f-lfgck + wait_pod pxc-client-6644d8898f-lfgck + local pod=pxc-client-6644d8898f-lfgck + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-lfgck ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-lfgck condition met pxc-client-6644d8898f-lfgck.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 10 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hK29k3sHdx +++ mktemp ++ local LAST_ERR=/tmp/tmp.Rr1oViTzdH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hK29k3sHdx ++ cat /tmp/tmp.Rr1oViTzdH ++ rm /tmp/tmp.hK29k3sHdx /tmp/tmp.Rr1oViTzdH ++ return 0 + client_pod=pxc-client-6644d8898f-lfgck + wait_pod pxc-client-6644d8898f-lfgck + local pod=pxc-client-6644d8898f-lfgck + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-lfgck ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-lfgck condition met pxc-client-6644d8898f-lfgck.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 11 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XJmIWVVc3z +++ mktemp ++ local LAST_ERR=/tmp/tmp.v2LKAyeysj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XJmIWVVc3z ++ cat /tmp/tmp.v2LKAyeysj ++ rm /tmp/tmp.XJmIWVVc3z /tmp/tmp.v2LKAyeysj ++ return 0 + client_pod=pxc-client-6644d8898f-lfgck + wait_pod pxc-client-6644d8898f-lfgck + local pod=pxc-client-6644d8898f-lfgck + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-lfgck ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace 
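# A minimal sketch, not from the test suite, of the propagation poll running in
# the trace above. With MySQL 8.0 dual passwords, mysql.user.User_attributes
# carries {"additional_password": "<old hash>"} while the previous monitor
# password is still retained, and goes back to NULL once it is discarded; the
# test simply greps for NULL. Host, user and the 240 retries at 1s intervals are
# from the trace; calling mysql directly is an assumption here (the test executes
# it inside the pxc-client pod).
query="SELECT User_attributes FROM mysql.user WHERE user='monitor'"
for retry in $(seq 1 240); do
    if mysql -h some-name-pxc -uroot -p'test-password' -NBe "$query" | grep -q NULL; then
        break
    fi
    echo 'waiting for password propagation'
    sleep 1
done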
pod/pxc-client-6644d8898f-lfgck condition met pxc-client-6644d8898f-lfgck.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 12 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JIdkm5aMzI +++ mktemp ++ local LAST_ERR=/tmp/tmp.iu9zBxVrpe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JIdkm5aMzI ++ cat /tmp/tmp.iu9zBxVrpe ++ rm /tmp/tmp.JIdkm5aMzI /tmp/tmp.iu9zBxVrpe ++ return 0 + client_pod=pxc-client-6644d8898f-lfgck + wait_pod pxc-client-6644d8898f-lfgck + local pod=pxc-client-6644d8898f-lfgck + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-lfgck ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-lfgck condition met pxc-client-6644d8898f-lfgck.Ok + set +o xtrace NULL + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TXZZjsKRFP +++ mktemp ++ local LAST_ERR=/tmp/tmp.3XTw5QtMym ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TXZZjsKRFP ++ cat /tmp/tmp.3XTw5QtMym ++ rm /tmp/tmp.TXZZjsKRFP /tmp/tmp.3XTw5QtMym ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gQCZTXSFLo +++ mktemp ++ local LAST_ERR=/tmp/tmp.pASRjAXLoy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gQCZTXSFLo ++ cat /tmp/tmp.pASRjAXLoy ++ rm /tmp/tmp.gQCZTXSFLo /tmp/tmp.pASRjAXLoy ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.tlQxNtSSIS ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.qETc5dY7W5 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.tlQxNtSSIS +++++ cat 
/tmp/tmp.qETc5dY7W5 +++++ rm /tmp/tmp.tlQxNtSSIS /tmp/tmp.qETc5dY7W5 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.0qGLHSQRJi ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.6SnG6k7K1i +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.0qGLHSQRJi +++++ cat /tmp/tmp.6SnG6k7K1i +++++ rm /tmp/tmp.0qGLHSQRJi /tmp/tmp.6SnG6k7K1i +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3n16GCT6tl +++ mktemp ++ local LAST_ERR=/tmp/tmp.fP2dbmzHXS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3n16GCT6tl ++ cat /tmp/tmp.fP2dbmzHXS ++ rm /tmp/tmp.3n16GCT6tl /tmp/tmp.fP2dbmzHXS ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pUZPgu2d8J +++ mktemp ++ local LAST_ERR=/tmp/tmp.tRgFMyaDVB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pUZPgu2d8J ++ cat /tmp/tmp.tRgFMyaDVB ++ rm /tmp/tmp.pUZPgu2d8J /tmp/tmp.tRgFMyaDVB ++ return 0 + client_pod=pxc-client-6644d8898f-lfgck + wait_pod pxc-client-6644d8898f-lfgck + local pod=pxc-client-6644d8898f-lfgck + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-lfgck ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-lfgck condition met pxc-client-6644d8898f-lfgck.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.BIh0IyeRrW/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.BIh0IyeRrW/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.20iX8XSwE1 ++ mktemp + local LAST_ERR=/tmp/tmp.Y5hCCpLPhD + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.20iX8XSwE1 secret/my-cluster-secrets patched + cat /tmp/tmp.Y5hCCpLPhD + rm /tmp/tmp.20iX8XSwE1 /tmp/tmp.Y5hCCpLPhD + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FZUW0iq2xX +++ mktemp ++ local LAST_ERR=/tmp/tmp.Zl903Y5dRz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FZUW0iq2xX ++ cat /tmp/tmp.Zl903Y5dRz ++ rm /tmp/tmp.FZUW0iq2xX /tmp/tmp.Zl903Y5dRz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6pfifZbnDd +++ mktemp ++ local LAST_ERR=/tmp/tmp.ehgNMF714E ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6pfifZbnDd ++ cat /tmp/tmp.ehgNMF714E ++ rm /tmp/tmp.6pfifZbnDd /tmp/tmp.ehgNMF714E ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.k0CRwXnqxf +++ mktemp ++ local LAST_ERR=/tmp/tmp.kZ3EHv5g5d ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.k0CRwXnqxf ++ cat /tmp/tmp.kZ3EHv5g5d ++ rm /tmp/tmp.k0CRwXnqxf /tmp/tmp.kZ3EHv5g5d ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.LO9olk0hhF ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.opF538OIfL +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ 
exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.LO9olk0hhF +++++ cat /tmp/tmp.opF538OIfL +++++ rm /tmp/tmp.LO9olk0hhF /tmp/tmp.opF538OIfL +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.PJPuAx3nv3 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.MV2Hm5ySmH +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.PJPuAx3nv3 +++++ cat /tmp/tmp.MV2Hm5ySmH +++++ rm /tmp/tmp.PJPuAx3nv3 /tmp/tmp.MV2Hm5ySmH +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hptAayuGUU +++ mktemp ++ local LAST_ERR=/tmp/tmp.waT0KCxQWA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hptAayuGUU ++ cat /tmp/tmp.waT0KCxQWA ++ rm /tmp/tmp.hptAayuGUU /tmp/tmp.waT0KCxQWA ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.E3m90042W1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ahiAk83Rpl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.E3m90042W1 ++ cat /tmp/tmp.ahiAk83Rpl ++ rm /tmp/tmp.E3m90042W1 /tmp/tmp.ahiAk83Rpl ++ return 0 + client_pod=pxc-client-6644d8898f-lfgck + wait_pod pxc-client-6644d8898f-lfgck + local pod=pxc-client-6644d8898f-lfgck + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-lfgck ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-lfgck condition met pxc-client-6644d8898f-lfgck.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.BIh0IyeRrW/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.BIh0IyeRrW/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.d3PiMvXeo2 ++ mktemp + local LAST_ERR=/tmp/tmp.f3HSkYEPKD + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.d3PiMvXeo2 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.f3HSkYEPKD + rm /tmp/tmp.d3PiMvXeo2 /tmp/tmp.f3HSkYEPKD + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gwluQcmcvd +++ mktemp ++ local LAST_ERR=/tmp/tmp.4WkXGJsXg3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gwluQcmcvd ++ cat /tmp/tmp.4WkXGJsXg3 ++ rm /tmp/tmp.gwluQcmcvd /tmp/tmp.4WkXGJsXg3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GpkrCUpOjL +++ mktemp ++ local LAST_ERR=/tmp/tmp.LiH34JG0K3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GpkrCUpOjL ++ cat /tmp/tmp.LiH34JG0K3 ++ rm /tmp/tmp.GpkrCUpOjL /tmp/tmp.LiH34JG0K3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cxEkrERKq8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.4ietaXyilN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cxEkrERKq8 ++ cat /tmp/tmp.4ietaXyilN ++ rm /tmp/tmp.cxEkrERKq8 /tmp/tmp.4ietaXyilN ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gPpwM0DdHC +++ mktemp ++ local LAST_ERR=/tmp/tmp.mts1jTpTFl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ 
break ++ cat /tmp/tmp.gPpwM0DdHC ++ cat /tmp/tmp.mts1jTpTFl ++ rm /tmp/tmp.gPpwM0DdHC /tmp/tmp.mts1jTpTFl ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jIGIaqKzzj +++ mktemp ++ local LAST_ERR=/tmp/tmp.CR9JQxMVEG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jIGIaqKzzj ++ cat /tmp/tmp.CR9JQxMVEG ++ rm /tmp/tmp.jIGIaqKzzj /tmp/tmp.CR9JQxMVEG ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.B6AyjJ0Wak +++ mktemp ++ local LAST_ERR=/tmp/tmp.PYALH0v1ri ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.B6AyjJ0Wak ++ cat /tmp/tmp.PYALH0v1ri ++ rm /tmp/tmp.B6AyjJ0Wak /tmp/tmp.PYALH0v1ri ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Dsbop0WFZN +++ mktemp ++ local LAST_ERR=/tmp/tmp.g6DkeofluL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Dsbop0WFZN ++ cat /tmp/tmp.g6DkeofluL ++ rm /tmp/tmp.Dsbop0WFZN /tmp/tmp.g6DkeofluL ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MlhMoSxXlO +++ mktemp ++ local LAST_ERR=/tmp/tmp.5tr2WjwAXA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MlhMoSxXlO ++ cat /tmp/tmp.5tr2WjwAXA ++ rm /tmp/tmp.MlhMoSxXlO /tmp/tmp.5tr2WjwAXA ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.R2H4aBsjeG ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.AkpYIVVTTH +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.R2H4aBsjeG +++++ cat /tmp/tmp.AkpYIVVTTH +++++ rm /tmp/tmp.R2H4aBsjeG /tmp/tmp.AkpYIVVTTH +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.GjVDbu2ztm ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.QS4ZLDSZYE +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.GjVDbu2ztm +++++ 
cat /tmp/tmp.QS4ZLDSZYE +++++ rm /tmp/tmp.GjVDbu2ztm /tmp/tmp.QS4ZLDSZYE +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vooF2QBeqM +++ mktemp ++ local LAST_ERR=/tmp/tmp.fUnXfF8Oy1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vooF2QBeqM ++ cat /tmp/tmp.fUnXfF8Oy1 ++ rm /tmp/tmp.vooF2QBeqM /tmp/tmp.fUnXfF8Oy1 ++ return 0 + [[ 2 == \2 ]] + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.1H21zvqqko ++ mktemp + local LAST_ERR=/tmp/tmp.VpSWlyrpyE + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1H21zvqqko secret/my-cluster-secrets-2 patched + cat /tmp/tmp.VpSWlyrpyE + rm /tmp/tmp.1H21zvqqko /tmp/tmp.VpSWlyrpyE + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vQVH172m6f +++ mktemp ++ local LAST_ERR=/tmp/tmp.XfroabgkrH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vQVH172m6f ++ cat /tmp/tmp.XfroabgkrH ++ rm /tmp/tmp.vQVH172m6f /tmp/tmp.XfroabgkrH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cgfGEsKTFU +++ mktemp ++ local LAST_ERR=/tmp/tmp.OgHaRaky03 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cgfGEsKTFU ++ cat /tmp/tmp.OgHaRaky03 ++ rm /tmp/tmp.cgfGEsKTFU /tmp/tmp.OgHaRaky03 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FTzSUHaqtn +++ mktemp ++ local LAST_ERR=/tmp/tmp.XBbSucw49d ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' 
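# A minimal sketch, not part of the log, of the operator-password rotation being
# exercised above, reduced to the two commands that matter. The secret name, the
# key and the base64 value are verbatim from the trace; the direct mysql call is
# an assumption (the test routes it through the pxc-client pod and diffs the
# output against select-4-80.sql).
newpass=$(echo -n 'test-password2' | base64)   # dGVzdC1wYXNzd29yZDI=
kubectl patch secret my-cluster-secrets-2 -p "{\"data\":{\"operator\": \"${newpass}\"}}"
# after the operator reconciles and the cluster reports ready again, the rotated
# credential must work through ProxySQL:
mysql -h some-name-proxysql -uoperator -p'test-password2' -e 'SHOW TABLES;'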
++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FTzSUHaqtn ++ cat /tmp/tmp.XBbSucw49d ++ rm /tmp/tmp.FTzSUHaqtn /tmp/tmp.XBbSucw49d ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.YzNxCyIuTg ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.DVj751qcpt +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.YzNxCyIuTg +++++ cat /tmp/tmp.DVj751qcpt +++++ rm /tmp/tmp.YzNxCyIuTg /tmp/tmp.DVj751qcpt +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.JQhWfBnYoK ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.0jvNPAvVJS +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.JQhWfBnYoK +++++ cat /tmp/tmp.0jvNPAvVJS +++++ rm /tmp/tmp.JQhWfBnYoK /tmp/tmp.0jvNPAvVJS +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QyMQaeoYJZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.kYLdkce2cK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QyMQaeoYJZ ++ cat /tmp/tmp.kYLdkce2cK ++ rm /tmp/tmp.QyMQaeoYJZ /tmp/tmp.kYLdkce2cK ++ return 0 + [[ 2 == \2 ]] + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EY5PB1yoeO +++ mktemp ++ local LAST_ERR=/tmp/tmp.hZFVMMshbZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EY5PB1yoeO ++ cat /tmp/tmp.hZFVMMshbZ ++ rm /tmp/tmp.EY5PB1yoeO /tmp/tmp.hZFVMMshbZ ++ return 0 + client_pod=pxc-client-6644d8898f-lfgck + wait_pod pxc-client-6644d8898f-lfgck + local pod=pxc-client-6644d8898f-lfgck + local max_retry=480 + local ns= ++ echo 
pxc-client-6644d8898f-lfgck ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-lfgck condition met pxc-client-6644d8898f-lfgck.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.BIh0IyeRrW/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.BIh0IyeRrW/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.QRxbaPjsCB +++ mktemp ++ local LAST_ERR=/tmp/tmp.28pv2lrWqR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QRxbaPjsCB ++ cat /tmp/tmp.28pv2lrWqR ++ rm /tmp/tmp.QRxbaPjsCB /tmp/tmp.28pv2lrWqR ++ return 0 + newpass='=i=dScG#fDTXm}f6)3*' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''=i=dScG#fDTXm}f6)3*'\'';' '-h some-name-pxc -uroot -p'\''=i=dScG#fDTXm}f6)3*'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''=i=dScG#fDTXm}f6)3*'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''=i=dScG#fDTXm}f6)3*'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4heUTswpD6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.0vi4YL1TPz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4heUTswpD6 ++ cat /tmp/tmp.0vi4YL1TPz ++ rm /tmp/tmp.4heUTswpD6 /tmp/tmp.0vi4YL1TPz ++ return 0 + client_pod=pxc-client-6644d8898f-lfgck + wait_pod pxc-client-6644d8898f-lfgck + local pod=pxc-client-6644d8898f-lfgck + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-lfgck ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-lfgck condition met pxc-client-6644d8898f-lfgck.Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''=i=dScG#fDTXm}f6)3*'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''=i=dScG#fDTXm}f6)3*'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''=i=dScG#fDTXm}f6)3*'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''=i=dScG#fDTXm}f6)3*'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local 
LAST_OUT=/tmp/tmp.HygwoUPQY2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.g2uHgLFMNB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HygwoUPQY2 ++ cat /tmp/tmp.g2uHgLFMNB ++ rm /tmp/tmp.HygwoUPQY2 /tmp/tmp.g2uHgLFMNB ++ return 0 + client_pod=pxc-client-6644d8898f-lfgck + wait_pod pxc-client-6644d8898f-lfgck + local pod=pxc-client-6644d8898f-lfgck + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-lfgck ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-lfgck condition met pxc-client-6644d8898f-lfgck.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.BIh0IyeRrW/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.BIh0IyeRrW/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.hvB2enAvbL +++ mktemp ++ local LAST_ERR=/tmp/tmp.PeAIWSzoDr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hvB2enAvbL ++ cat /tmp/tmp.PeAIWSzoDr ++ rm /tmp/tmp.hvB2enAvbL /tmp/tmp.PeAIWSzoDr ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.OvVGYL6Mct ++ mktemp + local LAST_ERR=/tmp/tmp.s4SWA5YJR3 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.OvVGYL6Mct secret/my-cluster-secrets-2 configured + cat /tmp/tmp.s4SWA5YJR3 Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
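# A short note and sketch, not part of the captured log: the kubectl warning above
# is benign. kubectl apply emits it because my-cluster-secrets-2 carried no
# kubectl.kubernetes.io/last-applied-configuration annotation (it was not managed
# via apply up to this point), and, as the message says, it patches the annotation
# in and continues. The command below mirrors the getSecretData call earlier in
# the trace, which is how the test read the effective operator password just
# before re-applying secrets.yml; secret and key names are verbatim from the trace.
kubectl get secrets/internal-some-name --template='{{.data.operator}}' | base64 --decode
# expected to print test-password2 at this point in the run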
+ rm /tmp/tmp.OvVGYL6Mct /tmp/tmp.s4SWA5YJR3 + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XdLKsAr0NU +++ mktemp ++ local LAST_ERR=/tmp/tmp.wgDKA1pPia ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XdLKsAr0NU ++ cat /tmp/tmp.wgDKA1pPia ++ rm /tmp/tmp.XdLKsAr0NU /tmp/tmp.wgDKA1pPia ++ return 0 + client_pod=pxc-client-6644d8898f-lfgck + wait_pod pxc-client-6644d8898f-lfgck + local pod=pxc-client-6644d8898f-lfgck + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-lfgck ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-lfgck condition met pxc-client-6644d8898f-lfgck.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.BIh0IyeRrW/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.BIh0IyeRrW/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' ++ mktemp + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_OUT=/tmp/tmp.vqR8RTmJbU + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1724-95c26a2c#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-8381~ + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' ++ mktemp + local LAST_ERR=/tmp/tmp.40pNFK8dNq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.vqR8RTmJbU perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.40pNFK8dNq + rm /tmp/tmp.vqR8RTmJbU /tmp/tmp.40pNFK8dNq + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.342BfKUQwp +++ mktemp ++ local LAST_ERR=/tmp/tmp.gYYKrYq7jb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.342BfKUQwp ++ cat /tmp/tmp.gYYKrYq7jb ++ rm /tmp/tmp.342BfKUQwp /tmp/tmp.gYYKrYq7jb ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.f7HedfwyCK +++ mktemp ++ local LAST_ERR=/tmp/tmp.RUWn4ijDki ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.f7HedfwyCK ++ cat /tmp/tmp.RUWn4ijDki ++ rm /tmp/tmp.f7HedfwyCK /tmp/tmp.RUWn4ijDki ++ return 0 + [[ 
initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yw7WgtaAwv +++ mktemp ++ local LAST_ERR=/tmp/tmp.wJTzdOPm06 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yw7WgtaAwv ++ cat /tmp/tmp.wJTzdOPm06 ++ rm /tmp/tmp.yw7WgtaAwv /tmp/tmp.wJTzdOPm06 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2Eze6vuKqm +++ mktemp ++ local LAST_ERR=/tmp/tmp.oaigvAjV7i ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2Eze6vuKqm ++ cat /tmp/tmp.oaigvAjV7i ++ rm /tmp/tmp.2Eze6vuKqm /tmp/tmp.oaigvAjV7i ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0Tc9PmCXXA +++ mktemp ++ local LAST_ERR=/tmp/tmp.jHH8t3vAG7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0Tc9PmCXXA ++ cat /tmp/tmp.jHH8t3vAG7 ++ rm /tmp/tmp.0Tc9PmCXXA /tmp/tmp.jHH8t3vAG7 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fMNixumGba +++ mktemp ++ local LAST_ERR=/tmp/tmp.yxp1k1FUK4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fMNixumGba ++ cat /tmp/tmp.yxp1k1FUK4 ++ rm /tmp/tmp.fMNixumGba /tmp/tmp.yxp1k1FUK4 ++ return 0 + [[ error == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.S7VYVzGn4X +++ mktemp ++ local LAST_ERR=/tmp/tmp.k8OukvJ2lD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.S7VYVzGn4X ++ cat /tmp/tmp.k8OukvJ2lD ++ rm /tmp/tmp.S7VYVzGn4X /tmp/tmp.k8OukvJ2lD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eCvnBzzS7B +++ mktemp ++ local LAST_ERR=/tmp/tmp.jheJQWBYO7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eCvnBzzS7B ++ cat /tmp/tmp.jheJQWBYO7 ++ rm 
/tmp/tmp.eCvnBzzS7B /tmp/tmp.jheJQWBYO7 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 7 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jNUvuAyd7S +++ mktemp ++ local LAST_ERR=/tmp/tmp.0SwemHhE9i ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jNUvuAyd7S ++ cat /tmp/tmp.0SwemHhE9i ++ rm /tmp/tmp.jNUvuAyd7S /tmp/tmp.0SwemHhE9i ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 8 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lQhycRSiph +++ mktemp ++ local LAST_ERR=/tmp/tmp.bL9Xhl5bgE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lQhycRSiph ++ cat /tmp/tmp.bL9Xhl5bgE ++ rm /tmp/tmp.lQhycRSiph /tmp/tmp.bL9Xhl5bgE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 9 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qGC6ODiU7S +++ mktemp ++ local LAST_ERR=/tmp/tmp.CQzd9oNMfz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qGC6ODiU7S ++ cat /tmp/tmp.CQzd9oNMfz ++ rm /tmp/tmp.qGC6ODiU7S /tmp/tmp.CQzd9oNMfz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 10 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gUOkFETv5k +++ mktemp ++ local LAST_ERR=/tmp/tmp.Lj2HmS25Hj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gUOkFETv5k ++ cat /tmp/tmp.Lj2HmS25Hj ++ rm /tmp/tmp.gUOkFETv5k /tmp/tmp.Lj2HmS25Hj ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 11 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EYPmhMGCGA +++ mktemp ++ local LAST_ERR=/tmp/tmp.zm0N9EgnkW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EYPmhMGCGA ++ cat /tmp/tmp.zm0N9EgnkW ++ rm /tmp/tmp.EYPmhMGCGA /tmp/tmp.zm0N9EgnkW ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 12 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PM9S13yQE6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ekHn30KxY4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ 
cat /tmp/tmp.PM9S13yQE6 ++ cat /tmp/tmp.ekHn30KxY4 ++ rm /tmp/tmp.PM9S13yQE6 /tmp/tmp.ekHn30KxY4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 13 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.y3s26olMvg +++ mktemp ++ local LAST_ERR=/tmp/tmp.YZmbeVp6eq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.y3s26olMvg ++ cat /tmp/tmp.YZmbeVp6eq ++ rm /tmp/tmp.y3s26olMvg /tmp/tmp.YZmbeVp6eq ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4KTyU9sYZb +++ mktemp ++ local LAST_ERR=/tmp/tmp.Faq5F8kAqB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4KTyU9sYZb ++ cat /tmp/tmp.Faq5F8kAqB ++ rm /tmp/tmp.4KTyU9sYZb /tmp/tmp.Faq5F8kAqB ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.FLA4BJHj0o ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.BHYmRk3t5u +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.FLA4BJHj0o +++++ cat /tmp/tmp.BHYmRk3t5u +++++ rm /tmp/tmp.FLA4BJHj0o /tmp/tmp.BHYmRk3t5u +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ubFw0hesuT +++ mktemp ++ local LAST_ERR=/tmp/tmp.RVqJryhibr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ubFw0hesuT ++ cat /tmp/tmp.RVqJryhibr ++ rm /tmp/tmp.ubFw0hesuT /tmp/tmp.RVqJryhibr ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.D4ywj7C0q9 ++ mktemp + local LAST_ERR=/tmp/tmp.7h5CpP2Evh + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.D4ywj7C0q9 secret/my-cluster-secrets patched + cat /tmp/tmp.7h5CpP2Evh + rm /tmp/tmp.D4ywj7C0q9 /tmp/tmp.7h5CpP2Evh + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency 
----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.noSJq0FfyC +++ mktemp ++ local LAST_ERR=/tmp/tmp.n6hy6uuFg1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.noSJq0FfyC ++ cat /tmp/tmp.n6hy6uuFg1 ++ rm /tmp/tmp.noSJq0FfyC /tmp/tmp.n6hy6uuFg1 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eZmzaOOPnR +++ mktemp ++ local LAST_ERR=/tmp/tmp.VNlPxXj7tc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eZmzaOOPnR ++ cat /tmp/tmp.VNlPxXj7tc ++ rm /tmp/tmp.eZmzaOOPnR /tmp/tmp.VNlPxXj7tc ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Oll6EARWzU +++ mktemp ++ local LAST_ERR=/tmp/tmp.GzsKefUtEu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Oll6EARWzU ++ cat /tmp/tmp.GzsKefUtEu ++ rm /tmp/tmp.Oll6EARWzU /tmp/tmp.GzsKefUtEu ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SqWGDF11uk +++ mktemp ++ local LAST_ERR=/tmp/tmp.rdXuU6Qjj3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SqWGDF11uk ++ cat /tmp/tmp.rdXuU6Qjj3 ++ rm /tmp/tmp.SqWGDF11uk /tmp/tmp.rdXuU6Qjj3 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9RnjUepXfa +++ mktemp ++ local LAST_ERR=/tmp/tmp.q0R8uMHmJv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9RnjUepXfa ++ cat /tmp/tmp.q0R8uMHmJv ++ rm /tmp/tmp.9RnjUepXfa /tmp/tmp.q0R8uMHmJv ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.RobBPtGIPj ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ZXc2gKbt7x +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.RobBPtGIPj +++++ cat /tmp/tmp.ZXc2gKbt7x +++++ rm /tmp/tmp.RobBPtGIPj /tmp/tmp.ZXc2gKbt7x +++++ return 0 ++++ [[ 
true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0CcTaTA5O5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.N31pqAqBbI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0CcTaTA5O5 ++ cat /tmp/tmp.N31pqAqBbI ++ rm /tmp/tmp.0CcTaTA5O5 /tmp/tmp.N31pqAqBbI ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-3-80.sql ']' + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TY3lqtP9vC +++ mktemp ++ local LAST_ERR=/tmp/tmp.Wd2jJqumb3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TY3lqtP9vC ++ cat /tmp/tmp.Wd2jJqumb3 ++ rm /tmp/tmp.TY3lqtP9vC /tmp/tmp.Wd2jJqumb3 ++ return 0 + client_pod=pxc-client-6644d8898f-lfgck + wait_pod pxc-client-6644d8898f-lfgck + local pod=pxc-client-6644d8898f-lfgck + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-lfgck ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-lfgck condition met pxc-client-6644d8898f-lfgck.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.BIh0IyeRrW/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1724/e2e-tests/users/compare/select-3.sql /tmp/tmp.BIh0IyeRrW/select-3.sql + destroy users-8381 + local namespace=users-8381 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + grep -v 'the object has been modified' ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ + sort -u +++ grep -c percona-xtradb-cluster-operator + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator + tee /tmp/tmp.BIh0IyeRrW/operator.log + grep -v 'get backup status: Job.batch' ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.BQoERgAFhk +++ mktemp ++ local LAST_ERR=/tmp/tmp.2u4s52TuJk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BQoERgAFhk ++ cat /tmp/tmp.2u4s52TuJk ++ rm /tmp/tmp.BQoERgAFhk /tmp/tmp.2u4s52TuJk ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-b98bcb965-c7ldl ++ mktemp + local LAST_OUT=/tmp/tmp.tl3KBA6jL2 ++ mktemp + local LAST_ERR=/tmp/tmp.ADUApYpXjA + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-b98bcb965-c7ldl + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.tl3KBA6jL2 + cat /tmp/tmp.ADUApYpXjA + rm /tmp/tmp.tl3KBA6jL2 /tmp/tmp.ADUApYpXjA + return 0 2024-06-21T08:06:59.823Z INFO setup Manager starting up {"gitCommit": "95c26a2c4b0fed99e50f66d2aeba66f92aa400d7", "gitBranch": "PR-1724-95c26a2c", "buildTime": "2024-06-21T06:06:43Z", "goVersion": "go1.22.4", "os": "linux", "arch": "amd64"} 2024-06-21T08:06:59.823Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.26.15-gke.1404000"} 2024-06-21T08:06:59.824Z INFO setup Registering Components. 2024-06-21T08:07:04.006Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2024-06-21T08:07:04.009Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2024-06-21T08:07:04.009Z INFO controller-runtime.metrics Starting metrics server 2024-06-21T08:07:04.009Z INFO controller-runtime.webhook Starting webhook server 2024-06-21T08:07:04.009Z INFO setup Starting the Cmd. 2024-06-21T08:07:04.009Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2024-06-21T08:07:04.010Z INFO controller-runtime.certwatcher Starting certificate watcher 2024-06-21T08:07:04.010Z INFO controller-runtime.certwatcher Updated current TLS certificate 2024-06-21T08:07:04.010Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2024-06-21T08:07:04.111Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 
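Before dumping the operator log, the teardown resolves the operator pod from its app label and filters routine noise out of the stream; a condensed sketch of that collection step (selector and namespace are the ones from this run, the output path is illustrative):

# find the operator pod in the pxc-operator namespace by its app.kubernetes.io/name label
operator_pod=$(kubectl get pods \
    --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator \
    -o 'jsonpath={.items[].metadata.name}' -n pxc-operator)

# capture its logs, dropping routine info-level and retry lines, and keep a copy as a test artifact
kubectl logs -n pxc-operator "$operator_pod" \
    | grep -v level=info \
    | grep -v 'the object has been modified' \
    | grep -v 'get backup status: Job.batch' \
    | tee /tmp/operator.log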
2024-06-21T08:07:04.127Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2024-06-21T08:07:04.128Z DEBUG events percona-xtradb-cluster-operator-b98bcb965-c7ldl_518c5ff8-d2e6-4290-b3b9-fe84b5b5328b became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"3dc23d2c-eb0e-4616-95ee-9d476756aeca","apiVersion":"coordination.k8s.io/v1","resourceVersion":"63497"}, "reason": "LeaderElection"} 2024-06-21T08:07:04.128Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2024-06-21T08:07:04.128Z INFO Starting Controller {"controller": "pxc-controller"} 2024-06-21T08:07:04.128Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2024-06-21T08:07:04.128Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2024-06-21T08:07:04.128Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"} 2024-06-21T08:07:04.128Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2024-06-21T08:07:04.234Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2024-06-21T08:07:04.234Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2024-06-21T08:07:04.244Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2024-06-21T08:07:27.732Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "384e1479-f141-4a5b-9a84-56ca1d0fb994", "version": "1.15.0"} 2024-06-21T08:08:48.072Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "690d52db-b116-4946-932d-600558ea3625", "user": "operator"} 2024-06-21T08:08:48.111Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "690d52db-b116-4946-932d-600558ea3625", "user": "monitor"} 2024-06-21T08:08:48.220Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "690d52db-b116-4946-932d-600558ea3625"} 2024-06-21T08:08:48.261Z INFO monitor user privileges granted {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "690d52db-b116-4946-932d-600558ea3625"} 2024-06-21T08:08:48.300Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "690d52db-b116-4946-932d-600558ea3625", "user": "xtrabackup"} 2024-06-21T08:08:48.358Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "690d52db-b116-4946-932d-600558ea3625"} 2024-06-21T08:08:48.399Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "690d52db-b116-4946-932d-600558ea3625", "user": "replication"} 2024-06-21T08:08:53.507Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "690d52db-b116-4946-932d-600558ea3625", "err": "get primary pxc pod: not found"} 2024-06-21T08:08:53.705Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": 
"f06535d4-5861-465a-8e65-b5b5a13979ec", "err": "get primary pxc pod: not found"} 2024-06-21T08:08:58.713Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "fdd5498c-33e7-422d-92a4-2fef5620bf7a", "err": "get primary pxc pod: not found"} 2024-06-21T08:11:12.097Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "ed287c00-54db-4d27-abcc-41b1999c75b1", "user": "root"} 2024-06-21T08:11:12.443Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "ed287c00-54db-4d27-abcc-41b1999c75b1", "new version": "8.0.36-28.1"} 2024-06-21T08:11:15.684Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "ed287c00-54db-4d27-abcc-41b1999c75b1"} 2024-06-21T08:11:21.105Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "4be1d289-11e8-4f4a-abbd-821ff5173c54"} 2024-06-21T08:11:26.564Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "7f8439e3-8321-4758-8eba-a439df5c0664"} 2024-06-21T08:11:31.971Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "f2fee162-9481-47ed-a069-14b2bdceb045"} 2024-06-21T08:11:37.355Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "c9a9be7b-fa2f-4a4a-b6ed-94b12053f57e"} 2024-06-21T08:11:42.809Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "f809f876-7228-447c-b7ce-63df36aa70a8"} 2024-06-21T08:11:49.063Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "0da468fc-1321-434b-b943-4b6984789ce3"} 2024-06-21T08:11:54.077Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "1975df72-e464-4dda-8c6c-313742769c05"} 2024-06-21T08:11:59.369Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "1296af54-c1a2-4331-9a7a-96effdf1de7e"} 2024-06-21T08:12:04.567Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "ea3604c6-bf1f-43fa-ad31-fe208afe04f0"} 2024-06-21T08:12:09.995Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "b3198b8a-4f90-4145-b734-630879afb0ce"} 2024-06-21T08:12:15.491Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "5408ffe6-a7a2-4f18-9aed-398601459df2"} 2024-06-21T08:12:17.634Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "8a700845-ce7e-4421-9068-2318f277447e", "user": "root"} 2024-06-21T08:12:17.690Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "8a700845-ce7e-4421-9068-2318f277447e", "user": "root"} 
2024-06-21T08:12:17.705Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "8a700845-ce7e-4421-9068-2318f277447e", "secret": "some-name-mysql-init", "user": "root"} 2024-06-21T08:12:23.284Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "8a700845-ce7e-4421-9068-2318f277447e"} 2024-06-21T08:12:23.296Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "8a700845-ce7e-4421-9068-2318f277447e", "user": "root"} 2024-06-21T08:12:23.351Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "8a700845-ce7e-4421-9068-2318f277447e", "user": "root"} 2024-06-21T08:12:28.169Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "8a700845-ce7e-4421-9068-2318f277447e"} 2024-06-21T08:12:32.283Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "63bfcfdc-5477-47b4-b4c0-52334c619353"} 2024-06-21T08:12:36.582Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "61961411-45ba-4d3b-b3e5-f451142999d9", "error": "exec syncusers: pod some-name-proxysql-2 does not have a host assigned / / ", "errorVerbose": "exec syncusers: pod some-name-proxysql-2 does not have a host assigned / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-21T08:12:55.300Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "4060b4ef-fab3-475d-84f6-1b552508844e", "err": "get primary pxc pod: not found"} 2024-06-21T08:12:59.770Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "26b4e319-bbdd-4445-8cce-6e9fdcc8dd6c", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-21T08:13:00.530Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", 
"reconcileID": "82cb00e6-a3e8-4882-b005-dee169c6f5ef", "user": "proxyadmin"} 2024-06-21T08:13:00.530Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "82cb00e6-a3e8-4882-b005-dee169c6f5ef", "user": "proxyadmin"} 2024-06-21T08:13:00.605Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "82cb00e6-a3e8-4882-b005-dee169c6f5ef", "user": "proxyadmin"} 2024-06-21T08:13:00.615Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "82cb00e6-a3e8-4882-b005-dee169c6f5ef", "user": "proxyadmin"} 2024-06-21T08:13:00.615Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "82cb00e6-a3e8-4882-b005-dee169c6f5ef", "last-applied-secret": "1a55867d0f4fa0d0643653b2eb57b2f690315d8d843a10599c9839c2a779a95f"} 2024-06-21T08:13:00.906Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "82cb00e6-a3e8-4882-b005-dee169c6f5ef", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-21T08:13:37.266Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "3659b63f-208f-49a5-8387-c53c1f59d485"} 2024-06-21T08:13:44.304Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "648d75c0-0e8d-45a3-9057-c5aca2406013"} 2024-06-21T08:13:50.761Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "a666cfb9-fb70-4d4a-9ccc-465a790044d0", "user": "xtrabackup"} 2024-06-21T08:13:50.794Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "a666cfb9-fb70-4d4a-9ccc-465a790044d0", "user": "xtrabackup"} 2024-06-21T08:13:50.803Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "a666cfb9-fb70-4d4a-9ccc-465a790044d0", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-21T08:13:50.819Z INFO Internal secrets updated {"controller": 
"pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "a666cfb9-fb70-4d4a-9ccc-465a790044d0", "user": "xtrabackup"} 2024-06-21T08:13:50.849Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "a666cfb9-fb70-4d4a-9ccc-465a790044d0", "user": "xtrabackup"} 2024-06-21T08:13:50.858Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "a666cfb9-fb70-4d4a-9ccc-465a790044d0", "last-applied-secret": "d11bcc70ca53ea410cd455e13d82e7225917b0861d9b677ed34c02c5f6fbaf1b"} 2024-06-21T08:13:53.730Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "8a1d0899-dbc5-435a-a717-7c3824077bd7"} 2024-06-21T08:14:44.373Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "6034941e-665e-49d1-b0ff-fab5a4b6e60e", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-8381 on 10.228.176.10:53: no such host"} 2024-06-21T08:14:54.667Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "24930f5e-d365-4d8a-861b-fa98c9a5d476", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-8381 on 10.228.176.10:53: no such host"} 2024-06-21T08:15:47.911Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "78bbb74f-5a97-4d1f-8530-7bdeea3d9fcb", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp 10.153.242.56:33062: connect: connection refused"} 2024-06-21T08:15:53.139Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "ef81f288-ecda-4038-8873-eb53eb169a8e", "primary name": "some-name-pxc-0.some-name-pxc.users-8381.svc.cluster.local"} 2024-06-21T08:16:08.848Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "879f60c4-35b6-4baa-bde5-4a92dbda60da", "primary name": "some-name-pxc-0.some-name-pxc.users-8381.svc.cluster.local"} 2024-06-21T08:16:19.258Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "6416c5d4-81e7-4db5-96f6-c81626d4f4e6", "primary name": "some-name-pxc-0.some-name-pxc.users-8381.svc.cluster.local"} 2024-06-21T08:16:28.199Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "2995a85e-237c-4e1a-ac11-b528e86f2ec0"} 2024-06-21T08:16:33.287Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "d0ad2174-ea12-4a98-81e0-9dc611bf0e4c"} 2024-06-21T08:16:38.701Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "7e65eb84-24c9-462e-8ef3-4654b13597d7"} 2024-06-21T08:16:44.478Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "bcac9dce-e620-4c91-abe8-a42f8696680a"} 2024-06-21T08:16:46.058Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "ba8a425e-78f6-494c-8971-7619691b22cc", "user": "monitor"} 2024-06-21T08:16:46.089Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "ba8a425e-78f6-494c-8971-7619691b22cc", "user": "monitor"} 2024-06-21T08:16:46.098Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "ba8a425e-78f6-494c-8971-7619691b22cc", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-21T08:16:46.146Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "ba8a425e-78f6-494c-8971-7619691b22cc", "user": "monitor"} 2024-06-21T08:16:46.168Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "ba8a425e-78f6-494c-8971-7619691b22cc", "user": "monitor"} 2024-06-21T08:16:46.287Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "ba8a425e-78f6-494c-8971-7619691b22cc", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"} 2024-06-21T08:16:49.128Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "ba8a425e-78f6-494c-8971-7619691b22cc", "error": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-21T08:17:45.473Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "32dfab3a-c872-4df8-bd87-37e9a218730f", "user": "monitor"} 
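The monitor rotation logged above is triggered from the test by patching the base64-encoded password straight into the cluster secret, the same patch_secret step visible earlier in this trace; a minimal sketch with the secret and key names from this run:

# rotate a single system user password by patching its key in the cluster secret
new_pass='test-password2'
encoded=$(echo -n "$new_pass" | base64)

kubectl patch secret my-cluster-secrets \
    -p="{\"data\":{\"monitor\": \"${encoded}\"}}"

# the operator then updates the user in PXC and ProxySQL and restarts the proxy pods,
# as the "Password changed" / "Proxy pods will be restarted" entries above show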
2024-06-21T08:17:45.785Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "32dfab3a-c872-4df8-bd87-37e9a218730f", "user": "monitor"} 2024-06-21T08:17:45.810Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "32dfab3a-c872-4df8-bd87-37e9a218730f", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"} 2024-06-21T08:17:49.466Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "32dfab3a-c872-4df8-bd87-37e9a218730f"} 2024-06-21T08:17:54.265Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "b4c36e29-f543-482a-ac9e-f63c55464c76"} 2024-06-21T08:17:59.534Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "1e4b6ee2-78ee-4a44-bb3e-759e7d7a2c50"} 2024-06-21T08:18:05.226Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "bc25a2c6-525c-49e7-b0f9-a889431a867e"} 2024-06-21T08:18:10.153Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "217690a6-b396-4ffa-8c6f-b71a08d17088"} 2024-06-21T08:18:15.750Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "76c60668-37cf-4ece-8e36-31537d7906c1"} 2024-06-21T08:18:17.623Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "484b2884-8730-4bb0-a7f7-6604c7d02a08", "user": "operator"} 2024-06-21T08:18:17.656Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "484b2884-8730-4bb0-a7f7-6604c7d02a08", "user": "operator"} 2024-06-21T08:18:17.677Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "484b2884-8730-4bb0-a7f7-6604c7d02a08", "secret": "some-name-mysql-init", "user": "operator"} 2024-06-21T08:18:17.694Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "484b2884-8730-4bb0-a7f7-6604c7d02a08", "user": "operator"} 2024-06-21T08:18:17.727Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "484b2884-8730-4bb0-a7f7-6604c7d02a08", "user": "operator"} 2024-06-21T08:18:17.761Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "484b2884-8730-4bb0-a7f7-6604c7d02a08", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-06-21T08:18:19.166Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "484b2884-8730-4bb0-a7f7-6604c7d02a08", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-8381.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 
'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-8381.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-8381.svc.c' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-8381.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-8381.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-8381.svc.c' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-21T08:19:02.349Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "ad8ade59-685f-413c-bd66-e8b851e5c397"} 2024-06-21T08:19:12.062Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "f4a19426-3f97-44c5-860a-7af2f90c11e8"} 2024-06-21T08:19:14.152Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149", "secrets": "my-cluster-secrets-2"} 2024-06-21T08:19:14.162Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149", "user": "root"} 2024-06-21T08:19:14.211Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149", "user": "root"} 2024-06-21T08:19:14.221Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149", "secret": "some-name-mysql-init", "user": "root"} 2024-06-21T08:19:22.135Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149"} 2024-06-21T08:19:22.144Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149", "user": "root"} 2024-06-21T08:19:22.201Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149", "user": "root"} 2024-06-21T08:19:22.219Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149", "user": "operator"} 2024-06-21T08:19:22.253Z INFO Password updated 
{"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149", "user": "operator"} 2024-06-21T08:19:22.264Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149", "secret": "some-name-mysql-init", "user": "operator"} 2024-06-21T08:19:22.277Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149", "user": "operator"} 2024-06-21T08:19:22.309Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149", "user": "operator"} 2024-06-21T08:19:22.321Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149", "user": "monitor"} 2024-06-21T08:19:22.354Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149", "user": "monitor"} 2024-06-21T08:19:22.363Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-21T08:19:22.431Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149", "user": "monitor"} 2024-06-21T08:19:22.444Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149", "user": "monitor"} 2024-06-21T08:19:22.549Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149", "user": "xtrabackup"} 2024-06-21T08:19:22.567Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "20ca46c9-eefb-4e73-9f1b-4618dd8c7e82"} 2024-06-21T08:19:22.577Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149", "user": "xtrabackup"} 2024-06-21T08:19:22.585Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-21T08:19:22.592Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149", "user": "xtrabackup"} 2024-06-21T08:19:22.624Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149", "user": "xtrabackup"} 2024-06-21T08:19:22.637Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149", "user": "replication"} 2024-06-21T08:19:22.668Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", 
"reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149", "user": "replication"} 2024-06-21T08:19:22.678Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149", "secret": "some-name-mysql-init", "user": "replication"} 2024-06-21T08:19:22.687Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149", "user": "replication"} 2024-06-21T08:19:22.716Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149", "user": "replication"} 2024-06-21T08:19:22.716Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149", "user": "proxyadmin"} 2024-06-21T08:19:22.765Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149", "user": "proxyadmin"} 2024-06-21T08:19:22.775Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149", "user": "proxyadmin"} 2024-06-21T08:19:22.775Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149", "last-applied-secret": "166313123ed7b6459849b90aaec216258484a57d39fe214fa3a95387ddcb0b92"} 2024-06-21T08:19:22.775Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149", "last-applied-secret": "166313123ed7b6459849b90aaec216258484a57d39fe214fa3a95387ddcb0b92"} 2024-06-21T08:19:23.049Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "df918de8-e83f-40b2-8ac2-a55af237f149", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-21T08:20:10.819Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "794855b1-b651-44a1-aa1f-74b52af9bc7b", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-8381 on 10.228.176.10:53: no such host"} 2024-06-21T08:20:15.854Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "714885a3-8694-4efd-b9c6-e15711764b33", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-8381 on 10.228.176.10:53: no such host"} 2024-06-21T08:20:21.233Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "dd72869c-290c-46c8-89c4-c66904dd50c4", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-8381 on 10.228.176.10:53: no such host"} 2024-06-21T08:21:08.967Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "4381a4f6-32d7-4fe6-b2c5-9211c8f8b282", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.153.242.59:33062: connect: connection refused"} 2024-06-21T08:21:14.147Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "27323cc2-6f0c-45f6-b21e-a44751658928", "primary name": "some-name-pxc-0.some-name-pxc.users-8381.svc.cluster.local"} 2024-06-21T08:21:19.330Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "3d142771-94a3-4f25-bea6-2bb24063a3a4", "primary name": "some-name-pxc-0.some-name-pxc.users-8381.svc.cluster.local"} 2024-06-21T08:21:24.658Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "4ab52f3c-b7fa-4f9e-afa8-c26b5ce8e99a", "primary name": "some-name-pxc-0.some-name-pxc.users-8381.svc.cluster.local"} 2024-06-21T08:21:29.893Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "8a9b739e-2753-49f9-a494-eb8651a63506", "primary name": "some-name-pxc-0.some-name-pxc.users-8381.svc.cluster.local"} 2024-06-21T08:21:35.194Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "dd8ddd2d-abd8-497c-a391-38f7cff28146", "primary name": "some-name-pxc-0.some-name-pxc.users-8381.svc.cluster.local"} 2024-06-21T08:21:40.419Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "40599b98-0bc2-4c08-b12e-2c730c6b7fd5", "primary name": "some-name-pxc-0.some-name-pxc.users-8381.svc.cluster.local"} 2024-06-21T08:21:45.710Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "e3dd1fd0-b44e-4963-b23d-3dd83d863d86", "primary name": "some-name-pxc-0.some-name-pxc.users-8381.svc.cluster.local"} 2024-06-21T08:21:51.183Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "c470161e-3c80-40a9-88bf-89dc8b2c1ade", "user": "monitor"} 2024-06-21T08:21:51.464Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "c470161e-3c80-40a9-88bf-89dc8b2c1ade", "user": "monitor"} 2024-06-21T08:21:51.485Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "c470161e-3c80-40a9-88bf-89dc8b2c1ade", "last-applied-secret": "166313123ed7b6459849b90aaec216258484a57d39fe214fa3a95387ddcb0b92"} 2024-06-21T08:21:55.128Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "c470161e-3c80-40a9-88bf-89dc8b2c1ade"} 2024-06-21T08:21:59.842Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "06e2db91-2a40-47d2-9178-0f34d46b5604"} 2024-06-21T08:22:01.553Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "fead9fd9-5083-492d-848a-5cdefa656098", "user": "operator"} 2024-06-21T08:22:01.583Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "fead9fd9-5083-492d-848a-5cdefa656098", "user": "operator"} 2024-06-21T08:22:01.597Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "fead9fd9-5083-492d-848a-5cdefa656098", "secret": "some-name-mysql-init", "user": "operator"} 2024-06-21T08:22:01.734Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "fead9fd9-5083-492d-848a-5cdefa656098", "user": "operator"} 2024-06-21T08:22:01.767Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "fead9fd9-5083-492d-848a-5cdefa656098", "user": "operator"} 2024-06-21T08:22:01.803Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "fead9fd9-5083-492d-848a-5cdefa656098", "last-applied-secret": "dade6014164eb344d88e2e848f913677a0625394d771926b5b0f581beb3c4be8"} 2024-06-21T08:22:03.849Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": 
"fead9fd9-5083-492d-848a-5cdefa656098", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-8381.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-8381.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-8381.svc.c' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-8381.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-8381.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-8381.svc.c' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-21T08:22:43.500Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "129a16be-d1ea-4182-96a3-780c126b8569"} 2024-06-21T08:22:52.061Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "fe191563-3aba-4358-a3e8-2f1e637ebf01"} 2024-06-21T08:22:57.248Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "1a729a55-fabb-4c05-aa58-7f59bcf5a6d4"} 2024-06-21T08:23:02.809Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "788b3cb2-011b-461c-bfd5-23912e0f0612"} 2024-06-21T08:23:08.267Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "52cfc202-d693-4174-a7c2-8e9560195b70"} 2024-06-21T08:23:15.270Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "aa4a30d8-9f2a-46e4-a30e-265a05b5b859"} 2024-06-21T08:23:19.431Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "f7ec4f55-d1fa-4770-8aff-73bd970909bc"} 2024-06-21T08:23:24.445Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "d7906630-8628-4cb9-86d6-bd2b6845a219"} 2024-06-21T08:23:29.765Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "5ec2b6c8-c71a-47e6-9773-095b63362afa"} 2024-06-21T08:23:35.244Z DEBUG PXC users synced with ProxySQL {"controller": 
"pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "4f0c1b96-f877-4380-9d6e-d8d466328c9d"} 2024-06-21T08:23:40.555Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "82eed773-e657-41a0-96c3-eab4d271b3f0"} 2024-06-21T08:23:46.020Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "9ad1a6f5-bd62-495a-83bc-7f77abe6d99f"} 2024-06-21T08:23:51.459Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "c092b7c4-f08a-4a50-8558-cdc23da9019d"} 2024-06-21T08:23:56.955Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "8fd9fd7c-e8ef-4b8b-83ec-36be452c95a4"} 2024-06-21T08:23:58.520Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "f682ef7e-518b-4e54-b6d1-f703babc9687", "user": "root"} 2024-06-21T08:23:58.569Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "f682ef7e-518b-4e54-b6d1-f703babc9687", "user": "root"} 2024-06-21T08:23:58.578Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "f682ef7e-518b-4e54-b6d1-f703babc9687", "secret": "some-name-mysql-init", "user": "root"} 2024-06-21T08:24:04.192Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "f682ef7e-518b-4e54-b6d1-f703babc9687"} 2024-06-21T08:24:04.202Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "f682ef7e-518b-4e54-b6d1-f703babc9687", "user": "root"} 2024-06-21T08:24:04.257Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "f682ef7e-518b-4e54-b6d1-f703babc9687", "user": "root"} 2024-06-21T08:24:04.281Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "f682ef7e-518b-4e54-b6d1-f703babc9687", "user": "monitor"} 2024-06-21T08:24:04.314Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "f682ef7e-518b-4e54-b6d1-f703babc9687", "user": "monitor"} 2024-06-21T08:24:04.323Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "f682ef7e-518b-4e54-b6d1-f703babc9687", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-21T08:24:04.381Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "f682ef7e-518b-4e54-b6d1-f703babc9687", "user": "monitor"} 2024-06-21T08:24:04.390Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "f682ef7e-518b-4e54-b6d1-f703babc9687", "user": "monitor"} 2024-06-21T08:24:04.477Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "f682ef7e-518b-4e54-b6d1-f703babc9687", "user": "xtrabackup"} 2024-06-21T08:24:04.509Z INFO Password updated {"controller": 
"pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "f682ef7e-518b-4e54-b6d1-f703babc9687", "user": "xtrabackup"} 2024-06-21T08:24:04.522Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "f682ef7e-518b-4e54-b6d1-f703babc9687", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-21T08:24:04.533Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "f682ef7e-518b-4e54-b6d1-f703babc9687", "user": "xtrabackup"} 2024-06-21T08:24:04.565Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "f682ef7e-518b-4e54-b6d1-f703babc9687", "user": "xtrabackup"} 2024-06-21T08:24:04.577Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "f682ef7e-518b-4e54-b6d1-f703babc9687", "user": "proxyadmin"} 2024-06-21T08:24:04.628Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "f682ef7e-518b-4e54-b6d1-f703babc9687", "user": "proxyadmin"} 2024-06-21T08:24:04.639Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "f682ef7e-518b-4e54-b6d1-f703babc9687", "user": "proxyadmin"} 2024-06-21T08:24:04.639Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "f682ef7e-518b-4e54-b6d1-f703babc9687", "last-applied-secret": "c01c4edeaecd0a3efb188001fab779b3d85ab81fc4586739654e8e8147116031"} 2024-06-21T08:24:04.639Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "f682ef7e-518b-4e54-b6d1-f703babc9687", "last-applied-secret": "c01c4edeaecd0a3efb188001fab779b3d85ab81fc4586739654e8e8147116031"} 2024-06-21T08:24:04.950Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "f682ef7e-518b-4e54-b6d1-f703babc9687", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-21T08:24:53.263Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "51a856cd-0f3d-46aa-b93c-4e34a91e8db4", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-8381 on 10.228.176.10:53: no such host"} 2024-06-21T08:24:58.161Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "27c1e185-8042-45d4-a853-5eff1d66470c", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-8381 on 10.228.176.10:53: no such host"} 2024-06-21T08:25:03.490Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "78315bb4-640d-43dd-8975-a082321c3aa4", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-8381 on 10.228.176.10:53: no such host"} 2024-06-21T08:25:40.537Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "93c00dd7-c4f3-4f27-8357-b3ec0d20ac9e", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-8381 on 10.228.176.10:53: no such host"} 2024-06-21T08:25:40.922Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "e4cb7757-ca98-40ae-81fa-9d38d4b5a350", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-8381 on 10.228.176.10:53: no such host"} 2024-06-21T08:25:45.832Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "090e79af-7394-4948-ad80-cf194877d5c3", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-8381 on 10.228.176.10:53: no such host"} 2024-06-21T08:25:51.064Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "81057479-2d94-4167-9e4c-c949fd138d46", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-8381 on 10.228.176.10:53: no such host"} 2024-06-21T08:25:56.232Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "8437e7c7-5108-42a5-8a4a-8d23b150d6ae", "primary name": "some-name-pxc-0.some-name-pxc.users-8381.svc.cluster.local"} 2024-06-21T08:26:01.402Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "a8df9fe1-7a2c-4b37-8011-63e386e9d517", "primary name": "some-name-pxc-0.some-name-pxc.users-8381.svc.cluster.local"} 2024-06-21T08:26:06.625Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "74f76ea0-97cc-4562-86b9-76455136a2bc", "primary name": "some-name-pxc-0.some-name-pxc.users-8381.svc.cluster.local"} 2024-06-21T08:26:11.850Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "6f1761ff-15c2-438b-8011-45e439e5548b", "primary name": "some-name-pxc-0.some-name-pxc.users-8381.svc.cluster.local"} 2024-06-21T08:26:17.567Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "3c378340-34fa-4419-8f44-5e59fa25b9b1", "primary name": "some-name-pxc-0.some-name-pxc.users-8381.svc.cluster.local"} 2024-06-21T08:26:22.887Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "014b5b1d-d4b4-4c79-b468-f5480cfa86eb", "primary name": "some-name-pxc-0.some-name-pxc.users-8381.svc.cluster.local"} 2024-06-21T08:26:28.375Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "32977946-2c12-419b-a018-4eaa094e77f1", "user": "monitor"} 2024-06-21T08:26:28.637Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "32977946-2c12-419b-a018-4eaa094e77f1", "user": "monitor"} 2024-06-21T08:26:28.661Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "32977946-2c12-419b-a018-4eaa094e77f1", "last-applied-secret": "c01c4edeaecd0a3efb188001fab779b3d85ab81fc4586739654e8e8147116031"} 2024-06-21T08:26:32.437Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "32977946-2c12-419b-a018-4eaa094e77f1"} 2024-06-21T08:26:37.030Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "7ee5f18f-7732-47c8-a99c-92a67a98e37a"} 2024-06-21T08:26:42.513Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "f8b7c4c3-49dc-47df-a82b-2e575f21a177"} 2024-06-21T08:26:42.549Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "b378bd72-f6bf-45e3-91d2-9409e5251071", "user": "root"} 2024-06-21T08:26:42.601Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "b378bd72-f6bf-45e3-91d2-9409e5251071", "user": "root"} 2024-06-21T08:26:42.612Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "b378bd72-f6bf-45e3-91d2-9409e5251071", "secret": "some-name-mysql-init", "user": "root"} 2024-06-21T08:26:42.621Z INFO Internal secrets 
updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "b378bd72-f6bf-45e3-91d2-9409e5251071", "user": "root"} 2024-06-21T08:26:42.677Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "b378bd72-f6bf-45e3-91d2-9409e5251071", "user": "root"} 2024-06-21T08:26:42.687Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "b378bd72-f6bf-45e3-91d2-9409e5251071", "user": "operator"} 2024-06-21T08:26:42.713Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "b378bd72-f6bf-45e3-91d2-9409e5251071", "user": "operator"} 2024-06-21T08:26:42.726Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "b378bd72-f6bf-45e3-91d2-9409e5251071", "secret": "some-name-mysql-init", "user": "operator"} 2024-06-21T08:26:42.735Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "b378bd72-f6bf-45e3-91d2-9409e5251071", "user": "operator"} 2024-06-21T08:26:42.768Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "b378bd72-f6bf-45e3-91d2-9409e5251071", "user": "operator"} 2024-06-21T08:26:42.779Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "b378bd72-f6bf-45e3-91d2-9409e5251071", "user": "monitor"} 2024-06-21T08:26:42.828Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "b378bd72-f6bf-45e3-91d2-9409e5251071", "user": "monitor"} 2024-06-21T08:26:42.851Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "b378bd72-f6bf-45e3-91d2-9409e5251071", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-21T08:26:42.865Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "b378bd72-f6bf-45e3-91d2-9409e5251071", "user": "monitor"} 2024-06-21T08:26:43.000Z ERROR Reconciler error {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "b378bd72-f6bf-45e3-91d2-9409e5251071", "error": "reconcile users: manage sys users: is password propagated: Pod \"some-name-haproxy-0\" not found", "errorVerbose": "Pod \"some-name-haproxy-0\" not found\nis password 
propagated\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).handleMonitorUser\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:517\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).updateUsers\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:165\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileUsers\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:110\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:281\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:114\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:311\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:261\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Start.func2.2\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:222\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695\nmanage sys users\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).reconcileUsers\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:112\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:281\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:114\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:311\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:261\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Start.func2.2\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:222\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695\nreconcile 
users\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).Reconcile\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:283\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Reconcile\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:114\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).reconcileHandler\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:311\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).processNextWorkItem\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:261\nsigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Start.func2.2\n\t/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:222\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-21T08:26:43.000Z INFO Warning: Reconciler returned both a non-zero result and a non-nil error. The result will always be ignored if the error is non-nil and the non-nil error causes reqeueuing with exponential backoff. For more details, see: https://pkg.go.dev/sigs.k8s.io/controller-runtime/pkg/reconcile#Reconciler {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "b378bd72-f6bf-45e3-91d2-9409e5251071"} 2024-06-21T08:26:43.679Z INFO KubeAPIWarningLogger .metadata.ownerReferences contains duplicate entries; API server dedups owner references in 1.20+, and may reject such requests as early as 1.24; please fix your requests; duplicate UID(s) observed: 23c14fed-b13f-4895-a2e4-4da072ed9a82 2024-06-21T08:26:46.857Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "6cee2f49-29f1-4cf3-8659-239e8aa3f097", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 10.228.189.3:3306: connect: connection refused"} 2024-06-21T08:27:50.640Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "9a83d5e5-2de2-4a19-8041-f5fa6b19de94", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-8381 on 10.228.176.10:53: no such host"} 2024-06-21T08:27:50.916Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "ddb456e3-5eb8-48e1-8832-3a7099778715", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp 10.153.240.45:33062: connect: connection refused"} 2024-06-21T08:28:39.048Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "25337c2a-64b0-437b-a78f-979f9b12c02b", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-8381 on 10.228.176.10:53: no such host"} 2024-06-21T08:29:19.554Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "d5c03085-c3f0-490f-94c8-8c02dab7c6a3", "user": "monitor"} 2024-06-21T08:29:19.883Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "d5c03085-c3f0-490f-94c8-8c02dab7c6a3", "user": "monitor"} 
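The "Password updated but old one not discarded" / "Old password discarded" pairs in the entries above match MySQL 8.0's dual-password rotation: the new password is set while the old one is retained, and the old one is discarded once it has propagated. A rough manual equivalent, sketched here for illustration only (the pod name some-name-pxc-0, the container name pxc, the 'monitor'@'%' account definition, and the $ROOT_PASS/$NEW_PASS placeholders are assumptions, not taken from this log), could be:

kubectl -n users-8381 exec some-name-pxc-0 -c pxc -- \
  mysql -uroot -p"$ROOT_PASS" \
  -e "ALTER USER 'monitor'@'%' IDENTIFIED BY '$NEW_PASS' RETAIN CURRENT PASSWORD;"
# once every consumer has picked up the new secret:
kubectl -n users-8381 exec some-name-pxc-0 -c pxc -- \
  mysql -uroot -p"$ROOT_PASS" \
  -e "ALTER USER 'monitor'@'%' DISCARD OLD PASSWORD;"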
2024-06-21T08:29:19.895Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "d5c03085-c3f0-490f-94c8-8c02dab7c6a3", "user": "xtrabackup"} 2024-06-21T08:29:19.925Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "d5c03085-c3f0-490f-94c8-8c02dab7c6a3", "user": "xtrabackup"} 2024-06-21T08:29:19.934Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "d5c03085-c3f0-490f-94c8-8c02dab7c6a3", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-21T08:29:19.943Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "d5c03085-c3f0-490f-94c8-8c02dab7c6a3", "user": "xtrabackup"} 2024-06-21T08:29:19.976Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "d5c03085-c3f0-490f-94c8-8c02dab7c6a3", "user": "xtrabackup"} 2024-06-21T08:29:19.985Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "d5c03085-c3f0-490f-94c8-8c02dab7c6a3", "user": "replication"} 2024-06-21T08:29:20.012Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "d5c03085-c3f0-490f-94c8-8c02dab7c6a3", "user": "replication"} 2024-06-21T08:29:20.021Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "d5c03085-c3f0-490f-94c8-8c02dab7c6a3", "secret": "some-name-mysql-init", "user": "replication"} 2024-06-21T08:29:20.035Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "d5c03085-c3f0-490f-94c8-8c02dab7c6a3", "user": "replication"} 2024-06-21T08:29:20.064Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "d5c03085-c3f0-490f-94c8-8c02dab7c6a3", "user": "replication"} 2024-06-21T08:29:20.064Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "d5c03085-c3f0-490f-94c8-8c02dab7c6a3", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-06-21T08:29:20.064Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "d5c03085-c3f0-490f-94c8-8c02dab7c6a3", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-06-21T08:31:31.812Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "79bb88fc-6039-48ce-924e-53a0762d197a", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-8381 on 10.228.176.10:53: no such host"} 2024-06-21T08:31:58.417Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "b829e00c-62cc-4d9a-8505-96d3445447ed", "user": "monitor"} 2024-06-21T08:31:58.452Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "b829e00c-62cc-4d9a-8505-96d3445447ed", "user": 
"monitor"} 2024-06-21T08:31:58.467Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "b829e00c-62cc-4d9a-8505-96d3445447ed", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-21T08:31:58.495Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "b829e00c-62cc-4d9a-8505-96d3445447ed", "user": "monitor"} 2024-06-21T08:31:58.613Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "b829e00c-62cc-4d9a-8505-96d3445447ed", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"} 2024-06-21T08:33:04.044Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "c93c4c4c-e86e-461a-bd99-c255f0ee284c", "user": "monitor"} 2024-06-21T08:33:04.429Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "c93c4c4c-e86e-461a-bd99-c255f0ee284c", "user": "monitor"} 2024-06-21T08:33:04.457Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-8381", "name": "some-name", "reconcileID": "c93c4c4c-e86e-461a-bd99-c255f0ee284c", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"} github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:222 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:261 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:324 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1248 [mysql] 2024/06/21 08:20:21 packets.go:37: read tcp 10.153.242.54:52792->10.153.240.38:33062: read: connection reset by peer [mysql] 2024/06/21 08:27:50 packets.go:37: read tcp 10.153.242.54:57644->10.153.240.45:33062: read: connection reset by peer [mysql] 2024/06/21 08:28:58 packets.go:37: unexpected EOF [mysql] 2024/06/21 08:31:21 packets.go:37: read tcp 10.153.242.54:41172->10.228.189.3:3306: i/o timeout [mysql] 2024/06/21 08:31:31 packets.go:37: unexpected EOF sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).processNextWorkItem sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).reconcileHandler sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Start.func2.2 + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-8381 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.mJYe7XYmDv ++ mktemp + local LAST_ERR=/tmp/tmp.SQMrCVcHdd + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.mJYe7XYmDv perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.SQMrCVcHdd + rm /tmp/tmp.mJYe7XYmDv /tmp/tmp.SQMrCVcHdd + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ 
mktemp + local LAST_OUT=/tmp/tmp.ZtknD0sfLM ++ mktemp + local LAST_ERR=/tmp/tmp.SbyJfcC9Of + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ZtknD0sfLM No resources found + cat /tmp/tmp.SbyJfcC9Of + rm /tmp/tmp.ZtknD0sfLM /tmp/tmp.SbyJfcC9Of + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.kpcNH2CT4o ++ mktemp + local LAST_ERR=/tmp/tmp.lTuNGw8tLM + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.kpcNH2CT4o No resources found + cat /tmp/tmp.lTuNGw8tLM + rm /tmp/tmp.kpcNH2CT4o /tmp/tmp.lTuNGw8tLM + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.va9zO0b5yl ++ mktemp + local LAST_ERR=/tmp/tmp.4jHX3L9pnc + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.va9zO0b5yl validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.4jHX3L9pnc + rm /tmp/tmp.va9zO0b5yl /tmp/tmp.4jHX3L9pnc + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace users-8381 + rm -rf /tmp/tmp.BIh0IyeRrW ++ mktemp + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.NPOfxvZo9h + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- ++ mktemp + local LAST_OUT=/tmp/tmp.YgVMiHFGkG ++ mktemp + local LAST_ERR=/tmp/tmp.KOTG8xigiT + local exit_status=0 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.sJFFskXmfJ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace users-8381 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator
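The repeated "sync users" failures earlier in this log all report ERROR 1045 for 'proxyadmin'@'127.0.0.1' against the ProxySQL admin port 6032; they appear transient, occurring while the proxy pods restart with the freshly rotated proxyadmin password, and are followed by successful "PXC users synced with ProxySQL" entries. A quick manual check that the password stored by the operator actually opens an admin session could look like the sketch below (the secret name internal-some-name, the key proxyadmin, and the container name proxysql are assumptions based on the cluster name, not read from this log):

PROXYADMIN_PASS=$(kubectl -n users-8381 get secret internal-some-name \
  -o jsonpath='{.data.proxyadmin}' | base64 -d)
kubectl -n users-8381 exec some-name-proxysql-0 -c proxysql -- \
  mysql -h127.0.0.1 -P6032 -uproxyadmin -p"$PROXYADMIN_PASS" -e 'SELECT 1;'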