Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/logs/users-8-0.log WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 + create_infra users-16979 + local ns=users-16979 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-12309 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.fLkS8eSzLi ++ mktemp + local LAST_ERR=/tmp/tmp.Pit3EO1uj0 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.fLkS8eSzLi perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.Pit3EO1uj0 + rm /tmp/tmp.fLkS8eSzLi /tmp/tmp.Pit3EO1uj0 + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.qhO9rkCAR3 ++ mktemp + local LAST_ERR=/tmp/tmp.O9dldHTN6N + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.qhO9rkCAR3 No resources found + cat /tmp/tmp.O9dldHTN6N + rm /tmp/tmp.qhO9rkCAR3 /tmp/tmp.O9dldHTN6N + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.VL3dkJUCQH ++ mktemp + local LAST_ERR=/tmp/tmp.D11z8V4dnW + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.VL3dkJUCQH No resources found + cat /tmp/tmp.D11z8V4dnW + rm /tmp/tmp.VL3dkJUCQH /tmp/tmp.D11z8V4dnW + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified 
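Everything traced so far is teardown of the previous run: the leftover pxc custom resource has its finalizers cleared with a merge patch (the `xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p ...'` line) so deletion cannot hang, all pxc/pxc-backup/pxc-restore objects are removed, and destroy_chaos_mesh sweeps for chaos-mesh leftovers. Each `kubectl delete` in that sweep is fed an empty name list by its grep pipeline, fails with "no name was specified", and the trailing no-op (the `+ :` lines) swallows the error so the script keeps going. A minimal sketch of the sweep as reconstructed from this trace, not the suite's actual source; the validate-auth webhook pass and the `kubectl api-resources` probe are omitted for brevity, and the helm-uninstall branch is never reached in this run:

    destroy_chaos_mesh() {
        local chaos_mesh_ns
        chaos_mesh_ns=$(helm list --all-namespaces --filter chaos-mesh | tail -n1 | awk -F' ' '{print $2}' | sed s/NAMESPACE//)
        if [ -n "$chaos_mesh_ns" ]; then
            helm uninstall chaos-mesh --namespace "$chaos_mesh_ns"   # assumption: this branch is not exercised in this log
        fi
        for kind in MutatingWebhookConfiguration ValidatingWebhookConfiguration clusterrolebinding clusterrole; do
            # with no chaos-mesh objects present the name list is empty and kubectl errors out; '|| :' tolerates that
            timeout 30 kubectl delete "$kind" $(kubectl get "$kind" | grep chaos-mesh | awk '{print $1}') || :
        done
        timeout 30 kubectl delete crd $(kubectl get crd | grep chaos-mesh.org | awk '{print $1}') || :
    }
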
+ : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + awk '{print$1}' + '[' -n '' ']' + xargs kubectl delete ns + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator ++ mktemp + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + local LAST_OUT=/tmp/tmp.NmWpV0fNjv ++ mktemp + local LAST_ERR=/tmp/tmp.j5UyapAIMG + local exit_status=0 ++ mktemp ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + local LAST_OUT=/tmp/tmp.AkeTJv9yiE ++ mktemp + local LAST_ERR=/tmp/tmp.yyyn8P78BF + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.AkeTJv9yiE + cat /tmp/tmp.yyyn8P78BF + rm /tmp/tmp.AkeTJv9yiE /tmp/tmp.yyyn8P78BF + return 0 namespace "users-12309" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.NmWpV0fNjv namespace "pxc-operator" deleted + cat /tmp/tmp.j5UyapAIMG + rm /tmp/tmp.NmWpV0fNjv /tmp/tmp.j5UyapAIMG + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'namespace/pxc-operator - ' namespace/pxc-operator - + set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.S6oeNxpdZp ++ mktemp + local LAST_ERR=/tmp/tmp.L6xKGwyubK + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.S6oeNxpdZp namespace/pxc-operator created + cat /tmp/tmp.L6xKGwyubK + rm /tmp/tmp.S6oeNxpdZp /tmp/tmp.L6xKGwyubK + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.3ehwJK7Vrl +++ mktemp ++ local LAST_ERR=/tmp/tmp.v62qDzPk51 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3ehwJK7Vrl ++ cat /tmp/tmp.v62qDzPk51 ++ rm /tmp/tmp.3ehwJK7Vrl /tmp/tmp.v62qDzPk51 ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1710-fb797906-3-cluster3 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.sOo2ZLZyNS ++ mktemp + local LAST_ERR=/tmp/tmp.QvqluypvyZ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1710-fb797906-3-cluster3 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.sOo2ZLZyNS Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1710-fb797906-3-cluster3" modified. 
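Every kubectl call in this log runs through a kubectl_bin retry wrapper whose shape is fully visible in the expanded trace: two mktemp files, up to three attempts (`seq 0 2`) with `set +e` around the real command, and the captured stdout/stderr cat-ed and removed only after the loop exits. A minimal sketch under those observations; the output redirections are an assumption, since xtrace does not display them but the later `cat` of the temp files implies them:

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0 i
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"   # assumption: redirections inferred from the cat below
            exit_status=$?
            set -e
            [ "$exit_status" != 0 ] || break          # matches the traced '[' 0 '!=' 0 ']' followed by break
            sleep 0                                   # only 'sleep 0' appears between attempts; the extra '[ ... == 1 ]' guard in the trace is not recoverable here
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR"
        rm "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }

Later in the log the failure path is visible too: three failed `kubectl delete namespace users-16979` attempts end in `+ return 1`, which the caller tolerates with a trailing `:` no-op.
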
+ cat /tmp/tmp.QvqluypvyZ + rm /tmp/tmp.sOo2ZLZyNS /tmp/tmp.QvqluypvyZ + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.uYlXg78dmb ++ mktemp + local LAST_ERR=/tmp/tmp.EOx6NqINcW + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.uYlXg78dmb customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.EOx6NqINcW + rm /tmp/tmp.uYlXg78dmb /tmp/tmp.EOx6NqINcW + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.HYhZaifzjx ++ mktemp + local LAST_ERR=/tmp/tmp.kIRIdjghUd + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.HYhZaifzjx clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.kIRIdjghUd + rm /tmp/tmp.HYhZaifzjx /tmp/tmp.kIRIdjghUd + return 0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/deploy/cw-operator.yaml + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-1710-fb797906^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.b1iQcAJyaS ++ mktemp + local LAST_ERR=/tmp/tmp.F639RxUgw0 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.b1iQcAJyaS deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.F639RxUgw0 + rm /tmp/tmp.b1iQcAJyaS /tmp/tmp.F639RxUgw0 + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.beABM9BuXP ++ mktemp + local LAST_ERR=/tmp/tmp.dgYWZ0XlEO + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.beABM9BuXP pod/percona-xtradb-cluster-operator-8448ddf579-96dlf condition met + cat /tmp/tmp.dgYWZ0XlEO + rm /tmp/tmp.beABM9BuXP /tmp/tmp.dgYWZ0XlEO + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.NUeAxDNq9i +++ mktemp ++ local LAST_ERR=/tmp/tmp.0RDooExYfQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NUeAxDNq9i ++ cat /tmp/tmp.0RDooExYfQ ++ rm /tmp/tmp.NUeAxDNq9i /tmp/tmp.0RDooExYfQ ++ return 0 + wait_pod percona-xtradb-cluster-operator-8448ddf579-96dlf 480 pxc-operator + local pod=percona-xtradb-cluster-operator-8448ddf579-96dlf + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-8448ddf579-96dlf ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-8448ddf579-96dlf condition met percona-xtradb-cluster-operator-8448ddf579-96dlf.Ok + sleep 3 + create_namespace users-16979 + local namespace=users-16979 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v 
'^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces users-16979' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-16979 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-16979 + kubectl_bin get ns ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.umvq530Aa3 ++ mktemp + local LAST_OUT=/tmp/tmp.vrwiglqbYZ ++ mktemp + local LAST_ERR=/tmp/tmp.XaxThQelGn + local exit_status=0 + local LAST_ERR=/tmp/tmp.JETXxDFF6u + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-16979 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.umvq530Aa3 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-16979 + cat /tmp/tmp.XaxThQelGn + rm /tmp/tmp.umvq530Aa3 /tmp/tmp.XaxThQelGn + return 0 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-16979 Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.vrwiglqbYZ + cat /tmp/tmp.JETXxDFF6u Error from server (NotFound): namespaces "users-16979" not found + rm /tmp/tmp.vrwiglqbYZ /tmp/tmp.JETXxDFF6u + return 1 + : + wait_for_delete namespace/users-16979 + local res=namespace/users-16979 + echo -n 'namespace/users-16979 - ' namespace/users-16979 - + set +o xtrace Error from server (NotFound): namespaces "users-16979" not found + desc 'create namespace users-16979' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-16979 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-16979 ++ mktemp + local LAST_OUT=/tmp/tmp.iU2U9SEZk0 ++ mktemp + local LAST_ERR=/tmp/tmp.a9Pnqwt9wP + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-16979 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.iU2U9SEZk0 namespace/users-16979 created + cat /tmp/tmp.a9Pnqwt9wP + rm /tmp/tmp.iU2U9SEZk0 /tmp/tmp.a9Pnqwt9wP + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.20hARvMszq +++ mktemp ++ local LAST_ERR=/tmp/tmp.dky1tchVlW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.20hARvMszq ++ cat /tmp/tmp.dky1tchVlW ++ rm /tmp/tmp.20hARvMszq /tmp/tmp.dky1tchVlW ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1710-fb797906-3-cluster3 --namespace=users-16979 ++ mktemp + local LAST_OUT=/tmp/tmp.EYRaeW8UDF ++ mktemp + local LAST_ERR=/tmp/tmp.BNeKKZT4Bk + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1710-fb797906-3-cluster3 --namespace=users-16979 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.EYRaeW8UDF Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1710-fb797906-3-cluster3" modified. 
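The create_namespace helper traced above runs the same dance both times it appears: sweep stale namespaces, delete and re-create the target namespace, wait for the deletion to finish, then point the kubeconfig context at it. Two details worth noting from the trace: the `egrep -v '^kube-|^default$|...'` filter does not actually exclude `default`, because `kubectl get ns` prints multi-column lines that `^default$` cannot match, which is why the tolerated 'namespaces "default" is forbidden' error shows up in both sweeps; and the sweep and the target-namespace delete appear interleaved in the trace, so they likely run concurrently. A condensed sequential sketch, reconstructed from the trace rather than the suite's source:

    create_namespace() {
        local namespace=$1
        destroy_chaos_mesh
        # sweep old test namespaces; system namespaces are (mostly) filtered out, errors tolerated
        kubectl_bin get ns \
            | egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' \
            | awk '{print $1}' | xargs kubectl delete ns || :
        kubectl_bin delete namespace "$namespace" || :
        wait_for_delete "namespace/$namespace"
        kubectl_bin create namespace "$namespace"
        kubectl_bin config set-context "$(kubectl config current-context)" --namespace="$namespace"
    }
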
+ cat /tmp/tmp.BNeKKZT4Bk + rm /tmp/tmp.EYRaeW8UDF /tmp/tmp.BNeKKZT4Bk + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.1YSfMSeMiE ++ mktemp + local LAST_ERR=/tmp/tmp.wtKxMRN9Vc + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1YSfMSeMiE secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.wtKxMRN9Vc + rm /tmp/tmp.1YSfMSeMiE /tmp/tmp.wtKxMRN9Vc + return 0 + desc 'create PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.X06CFwOkJl ++ mktemp + local LAST_ERR=/tmp/tmp.atU14PvfHU + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.X06CFwOkJl secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.atU14PvfHU + rm /tmp/tmp.X06CFwOkJl /tmp/tmp.atU14PvfHU + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/conf/client.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1710-fb797906#' + /usr/bin/sed -e 's#image:.*-pmm$#image: 
perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' ++ mktemp + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + local LAST_OUT=/tmp/tmp.IaN3nfz1rV + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-16979~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' ++ mktemp + local LAST_ERR=/tmp/tmp.P51a63kgWI + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.IaN3nfz1rV deployment.apps/pxc-client created + cat /tmp/tmp.P51a63kgWI + rm /tmp/tmp.IaN3nfz1rV /tmp/tmp.P51a63kgWI + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/conf/some-name.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/conf/some-name.yml + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + local LAST_OUT=/tmp/tmp.71Ler4fS4V + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' ++ mktemp + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1710-fb797906#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + local LAST_ERR=/tmp/tmp.JI2vQuMaGh + local exit_status=0 ++ seq 0 2 + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-16979~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.71Ler4fS4V perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.JI2vQuMaGh + rm /tmp/tmp.71Ler4fS4V /tmp/tmp.JI2vQuMaGh + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.E67Lgufway ++++ mktemp +++ local LAST_ERR=/tmp/tmp.64AxsOviWX +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ 
exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.E67Lgufway +++ cat /tmp/tmp.64AxsOviWX +++ rm /tmp/tmp.E67Lgufway /tmp/tmp.64AxsOviWX +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.rfaoyFquud ++++ mktemp +++ local LAST_ERR=/tmp/tmp.EOVbjcCRSv +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.rfaoyFquud +++ cat /tmp/tmp.EOVbjcCRSv +++ rm /tmp/tmp.rfaoyFquud /tmp/tmp.EOVbjcCRSv +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-16979 ++ mktemp + local LAST_OUT=/tmp/tmp.lbuw1YSeco ++ mktemp + local LAST_ERR=/tmp/tmp.lEYo3XJtl7 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-16979 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-16979 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-16979 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.lbuw1YSeco + cat /tmp/tmp.lEYo3XJtl7 error: no matching resources found + rm /tmp/tmp.lbuw1YSeco /tmp/tmp.lEYo3XJtl7 + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met some-name-proxysql-0.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met some-name-pxc-0.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 
+ local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met some-name-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met some-name-pxc-2.Ok + sleep 10 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.h2uDtF4gIz +++ mktemp ++ local LAST_ERR=/tmp/tmp.Mfuns7VxCr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.h2uDtF4gIz ++ cat /tmp/tmp.Mfuns7VxCr ++ rm /tmp/tmp.h2uDtF4gIz /tmp/tmp.Mfuns7VxCr ++ return 0 + client_pod=pxc-client-6644d8898f-h6mz4 + wait_pod pxc-client-6644d8898f-h6mz4 + local pod=pxc-client-6644d8898f-h6mz4 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-h6mz4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-h6mz4 condition met pxc-client-6644d8898f-h6mz4.Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.o3fjgcmyfp +++ mktemp ++ local LAST_ERR=/tmp/tmp.dC5PELGgPv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.o3fjgcmyfp ++ cat /tmp/tmp.dC5PELGgPv ++ rm /tmp/tmp.o3fjgcmyfp /tmp/tmp.dC5PELGgPv ++ return 0 + client_pod=pxc-client-6644d8898f-h6mz4 + wait_pod pxc-client-6644d8898f-h6mz4 + local pod=pxc-client-6644d8898f-h6mz4 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-h6mz4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-h6mz4 condition met pxc-client-6644d8898f-h6mz4.Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 
'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KvW2pbEigA +++ mktemp ++ local LAST_ERR=/tmp/tmp.W3qchHYuRM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KvW2pbEigA ++ cat /tmp/tmp.W3qchHYuRM ++ rm /tmp/tmp.KvW2pbEigA /tmp/tmp.W3qchHYuRM ++ return 0 + client_pod=pxc-client-6644d8898f-h6mz4 + wait_pod pxc-client-6644d8898f-h6mz4 + local pod=pxc-client-6644d8898f-h6mz4 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-h6mz4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-h6mz4 condition met pxc-client-6644d8898f-h6mz4.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.8X6qenpyRR/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-1.sql /tmp/tmp.8X6qenpyRR/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xQJS3BbCNa +++ mktemp ++ local LAST_ERR=/tmp/tmp.lbDVaHHt7g ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xQJS3BbCNa ++ cat /tmp/tmp.lbDVaHHt7g ++ rm /tmp/tmp.xQJS3BbCNa /tmp/tmp.lbDVaHHt7g ++ return 0 + client_pod=pxc-client-6644d8898f-h6mz4 + wait_pod pxc-client-6644d8898f-h6mz4 + local pod=pxc-client-6644d8898f-h6mz4 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-h6mz4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-h6mz4 condition met pxc-client-6644d8898f-h6mz4.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.8X6qenpyRR/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-1.sql /tmp/tmp.8X6qenpyRR/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BwymoXqUxv +++ mktemp ++ local LAST_ERR=/tmp/tmp.RLxY80v8m9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BwymoXqUxv ++ cat /tmp/tmp.RLxY80v8m9 ++ rm /tmp/tmp.BwymoXqUxv /tmp/tmp.RLxY80v8m9 ++ return 0 + client_pod=pxc-client-6644d8898f-h6mz4 + wait_pod pxc-client-6644d8898f-h6mz4 + local pod=pxc-client-6644d8898f-h6mz4 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-h6mz4 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-6644d8898f-h6mz4 condition met pxc-client-6644d8898f-h6mz4.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.8X6qenpyRR/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-1.sql /tmp/tmp.8X6qenpyRR/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pezwLXNhCe +++ mktemp ++ local LAST_ERR=/tmp/tmp.o7rOJVN3rU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pezwLXNhCe ++ cat /tmp/tmp.o7rOJVN3rU Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.pezwLXNhCe /tmp/tmp.o7rOJVN3rU ++ return 0 + '[' '' ']' + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.oItuK8YjXU ++ mktemp + local LAST_ERR=/tmp/tmp.QUd2kErpJX + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.oItuK8YjXU secret/my-cluster-secrets patched + cat /tmp/tmp.QUd2kErpJX + rm /tmp/tmp.oItuK8YjXU /tmp/tmp.QUd2kErpJX + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4qdHuN5rH1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.YqF3mjLm9j ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4qdHuN5rH1 ++ cat /tmp/tmp.YqF3mjLm9j ++ rm /tmp/tmp.4qdHuN5rH1 /tmp/tmp.YqF3mjLm9j ++ return 0 + client_pod=pxc-client-6644d8898f-h6mz4 + wait_pod pxc-client-6644d8898f-h6mz4 + local pod=pxc-client-6644d8898f-h6mz4 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-h6mz4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-h6mz4 condition met pxc-client-6644d8898f-h6mz4.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.8X6qenpyRR/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.8X6qenpyRR/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.1H0iXpYer6 ++ mktemp + local LAST_ERR=/tmp/tmp.PDmx8DEOJL + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1H0iXpYer6 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.PDmx8DEOJL + rm /tmp/tmp.1H0iXpYer6 /tmp/tmp.PDmx8DEOJL + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WQKhC1nehx +++ mktemp ++ local LAST_ERR=/tmp/tmp.tbTHLSRUzv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WQKhC1nehx ++ cat /tmp/tmp.tbTHLSRUzv ++ rm /tmp/tmp.WQKhC1nehx /tmp/tmp.tbTHLSRUzv ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0ApAIBn9WF +++ mktemp ++ local LAST_ERR=/tmp/tmp.hYFSBXPZp5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0ApAIBn9WF ++ cat /tmp/tmp.hYFSBXPZp5 ++ rm /tmp/tmp.0ApAIBn9WF /tmp/tmp.hYFSBXPZp5 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.pIaJiWu42y ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.jf1QoKWccJ +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.pIaJiWu42y +++++ cat /tmp/tmp.jf1QoKWccJ +++++ rm /tmp/tmp.pIaJiWu42y /tmp/tmp.jf1QoKWccJ +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.YC18PpdT5i ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.4RGMGzRV2l +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.YC18PpdT5i +++++ cat /tmp/tmp.4RGMGzRV2l +++++ rm /tmp/tmp.YC18PpdT5i /tmp/tmp.4RGMGzRV2l +++++ return 0 ++++ [[ true == 
\t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JejriScwEt +++ mktemp ++ local LAST_ERR=/tmp/tmp.tUGTTRSeOY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JejriScwEt ++ cat /tmp/tmp.tUGTTRSeOY ++ rm /tmp/tmp.JejriScwEt /tmp/tmp.tUGTTRSeOY ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.penFfkTA3S ++ mktemp + local LAST_ERR=/tmp/tmp.ifh5IZtD5i + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.penFfkTA3S secret/my-cluster-secrets patched + cat /tmp/tmp.ifh5IZtD5i + rm /tmp/tmp.penFfkTA3S /tmp/tmp.ifh5IZtD5i + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2xg2oDoaDN +++ mktemp ++ local LAST_ERR=/tmp/tmp.SX8gbHrVAf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2xg2oDoaDN ++ cat /tmp/tmp.SX8gbHrVAf ++ rm /tmp/tmp.2xg2oDoaDN /tmp/tmp.SX8gbHrVAf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9uwlyWnpVe +++ mktemp ++ local LAST_ERR=/tmp/tmp.4jMNR6RElS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9uwlyWnpVe ++ cat /tmp/tmp.4jMNR6RElS ++ rm /tmp/tmp.9uwlyWnpVe /tmp/tmp.4jMNR6RElS ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2lcOXflnUY +++ mktemp ++ local LAST_ERR=/tmp/tmp.ajGdW3DXPo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2lcOXflnUY ++ cat /tmp/tmp.ajGdW3DXPo ++ rm /tmp/tmp.2lcOXflnUY /tmp/tmp.ajGdW3DXPo ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YZxyEmVnxc +++ mktemp ++ 
local LAST_ERR=/tmp/tmp.s6lvrcYlbN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YZxyEmVnxc ++ cat /tmp/tmp.s6lvrcYlbN ++ rm /tmp/tmp.YZxyEmVnxc /tmp/tmp.s6lvrcYlbN ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.2vHJQpcV0B ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.6uGvUAsFKw +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.2vHJQpcV0B +++++ cat /tmp/tmp.6uGvUAsFKw +++++ rm /tmp/tmp.2vHJQpcV0B /tmp/tmp.6uGvUAsFKw +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.eOyl4MKCPf ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.V49nIGIIHq +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.eOyl4MKCPf +++++ cat /tmp/tmp.V49nIGIIHq +++++ rm /tmp/tmp.eOyl4MKCPf /tmp/tmp.V49nIGIIHq +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.J7lGkk7NV6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.uRUDTReb7U ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.J7lGkk7NV6 ++ cat /tmp/tmp.uRUDTReb7U ++ rm /tmp/tmp.J7lGkk7NV6 /tmp/tmp.uRUDTReb7U ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.8X6qenpyRR/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-2.sql /tmp/tmp.8X6qenpyRR/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.8X6qenpyRR/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-2.sql /tmp/tmp.8X6qenpyRR/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.8X6qenpyRR/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-2.sql /tmp/tmp.8X6qenpyRR/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.cgGgQEPBtR ++ mktemp + local LAST_ERR=/tmp/tmp.R20qiTka5c + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.cgGgQEPBtR perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.R20qiTka5c + rm /tmp/tmp.cgGgQEPBtR /tmp/tmp.R20qiTka5c + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.3BJrG9blSw ++ mktemp + local LAST_ERR=/tmp/tmp.nnP6dDiiS9 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.3BJrG9blSw secret/my-cluster-secrets patched + cat /tmp/tmp.nnP6dDiiS9 + rm /tmp/tmp.3BJrG9blSw /tmp/tmp.nnP6dDiiS9 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HQD7qOqu5M +++ mktemp ++ local LAST_ERR=/tmp/tmp.QmBfktY9MB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HQD7qOqu5M ++ cat /tmp/tmp.QmBfktY9MB ++ rm /tmp/tmp.HQD7qOqu5M /tmp/tmp.QmBfktY9MB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yeDPDRBypP +++ mktemp ++ local LAST_ERR=/tmp/tmp.4KiRjnhTdw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yeDPDRBypP ++ cat /tmp/tmp.4KiRjnhTdw ++ rm /tmp/tmp.yeDPDRBypP /tmp/tmp.4KiRjnhTdw ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0dQtyV8LJA +++ mktemp ++ local LAST_ERR=/tmp/tmp.TMCxAymEEu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 
'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0dQtyV8LJA ++ cat /tmp/tmp.TMCxAymEEu ++ rm /tmp/tmp.0dQtyV8LJA /tmp/tmp.TMCxAymEEu ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lmKqwurb0r +++ mktemp ++ local LAST_ERR=/tmp/tmp.QtrPvUXzZ6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lmKqwurb0r ++ cat /tmp/tmp.QtrPvUXzZ6 ++ rm /tmp/tmp.lmKqwurb0r /tmp/tmp.QtrPvUXzZ6 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9WqkLM5u0P +++ mktemp ++ local LAST_ERR=/tmp/tmp.EmqZvomoJq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9WqkLM5u0P ++ cat /tmp/tmp.EmqZvomoJq ++ rm /tmp/tmp.9WqkLM5u0P /tmp/tmp.EmqZvomoJq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VxTYalvrGk +++ mktemp ++ local LAST_ERR=/tmp/tmp.cvp1LyoldE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VxTYalvrGk ++ cat /tmp/tmp.cvp1LyoldE ++ rm /tmp/tmp.VxTYalvrGk /tmp/tmp.cvp1LyoldE ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AUMIlAiomr +++ mktemp ++ local LAST_ERR=/tmp/tmp.n1OVfxpxMG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AUMIlAiomr ++ cat /tmp/tmp.n1OVfxpxMG ++ rm /tmp/tmp.AUMIlAiomr /tmp/tmp.n1OVfxpxMG ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8s8m4sHPGB +++ mktemp ++ local LAST_ERR=/tmp/tmp.oX9Mrn9fjI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8s8m4sHPGB ++ cat /tmp/tmp.oX9Mrn9fjI ++ rm /tmp/tmp.8s8m4sHPGB /tmp/tmp.oX9Mrn9fjI ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2ijQxvJyli +++ mktemp ++ local LAST_ERR=/tmp/tmp.Hspfjk2wiW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e 
++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2ijQxvJyli ++ cat /tmp/tmp.Hspfjk2wiW ++ rm /tmp/tmp.2ijQxvJyli /tmp/tmp.Hspfjk2wiW ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.h1Bf7E73RH ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.IYHoNBN7uY +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.h1Bf7E73RH +++++ cat /tmp/tmp.IYHoNBN7uY +++++ rm /tmp/tmp.h1Bf7E73RH /tmp/tmp.IYHoNBN7uY +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.forRZwpqat ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.AdDWZUgfNy +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.forRZwpqat +++++ cat /tmp/tmp.AdDWZUgfNy +++++ rm /tmp/tmp.forRZwpqat /tmp/tmp.AdDWZUgfNy +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lNd1lqTR9z +++ mktemp ++ local LAST_ERR=/tmp/tmp.141oVkJ0FV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lNd1lqTR9z ++ cat /tmp/tmp.141oVkJ0FV ++ rm /tmp/tmp.lNd1lqTR9z /tmp/tmp.141oVkJ0FV ++ return 0 + [[ 2 == \2 ]] + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-3-80.sql ']' + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' 
-s /tmp/tmp.8X6qenpyRR/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-3.sql /tmp/tmp.8X6qenpyRR/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.jHe47qFVvO ++ mktemp + local LAST_ERR=/tmp/tmp.X0soBbB950 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jHe47qFVvO secret/my-cluster-secrets patched + cat /tmp/tmp.X0soBbB950 + rm /tmp/tmp.jHe47qFVvO /tmp/tmp.X0soBbB950 + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.EMW0QzW6vE +++ mktemp ++ local LAST_ERR=/tmp/tmp.9hMndcKwyh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EMW0QzW6vE ++ cat /tmp/tmp.9hMndcKwyh ++ rm /tmp/tmp.EMW0QzW6vE /tmp/tmp.9hMndcKwyh ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + retry=0 + is_password_updated monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep additional_password + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.K4PZKmdjkP +++ mktemp ++ local LAST_ERR=/tmp/tmp.RVgZ0isnzz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.K4PZKmdjkP ++ cat /tmp/tmp.RVgZ0isnzz ++ rm /tmp/tmp.K4PZKmdjkP /tmp/tmp.RVgZ0isnzz ++ return 0 + client_pod=pxc-client-6644d8898f-h6mz4 + wait_pod pxc-client-6644d8898f-h6mz4 + local pod=pxc-client-6644d8898f-h6mz4 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-h6mz4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-h6mz4 condition met pxc-client-6644d8898f-h6mz4.Ok + set +o xtrace + echo 'waiting for password update' waiting for password update + sleep 1 + let retry+=1 + [[ 1 -ge 240 ]] + is_password_updated monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 
'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep additional_password + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CUPbgbFsvK +++ mktemp ++ local LAST_ERR=/tmp/tmp.PV2gtfqRvC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CUPbgbFsvK ++ cat /tmp/tmp.PV2gtfqRvC ++ rm /tmp/tmp.CUPbgbFsvK /tmp/tmp.PV2gtfqRvC ++ return 0 + client_pod=pxc-client-6644d8898f-h6mz4 + wait_pod pxc-client-6644d8898f-h6mz4 + local pod=pxc-client-6644d8898f-h6mz4 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-h6mz4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-h6mz4 condition met pxc-client-6644d8898f-h6mz4.Ok + set +o xtrace {"additional_password": "*C1F414D9BAF378B656A849B31F9F8AF3125F558B"} + retry=0 + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gaNhL9BDg1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.udxHJB83Pt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gaNhL9BDg1 ++ cat /tmp/tmp.udxHJB83Pt ++ rm /tmp/tmp.gaNhL9BDg1 /tmp/tmp.udxHJB83Pt ++ return 0 + client_pod=pxc-client-6644d8898f-h6mz4 + wait_pod pxc-client-6644d8898f-h6mz4 + local pod=pxc-client-6644d8898f-h6mz4 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-h6mz4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-h6mz4 condition met pxc-client-6644d8898f-h6mz4.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 1 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.91uX0bfmUb +++ mktemp ++ local LAST_ERR=/tmp/tmp.RCPWCS1OqU ++ local 
exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.91uX0bfmUb ++ cat /tmp/tmp.RCPWCS1OqU ++ rm /tmp/tmp.91uX0bfmUb /tmp/tmp.RCPWCS1OqU ++ return 0 + client_pod=pxc-client-6644d8898f-h6mz4 + wait_pod pxc-client-6644d8898f-h6mz4 + local pod=pxc-client-6644d8898f-h6mz4 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-h6mz4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-h6mz4 condition met pxc-client-6644d8898f-h6mz4.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 2 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lbm1ic50VF +++ mktemp ++ local LAST_ERR=/tmp/tmp.DNBt3VHOEi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lbm1ic50VF ++ cat /tmp/tmp.DNBt3VHOEi ++ rm /tmp/tmp.lbm1ic50VF /tmp/tmp.DNBt3VHOEi ++ return 0 + client_pod=pxc-client-6644d8898f-h6mz4 + wait_pod pxc-client-6644d8898f-h6mz4 + local pod=pxc-client-6644d8898f-h6mz4 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-h6mz4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-h6mz4 condition met pxc-client-6644d8898f-h6mz4.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 3 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BuDGbHBoao +++ mktemp ++ local LAST_ERR=/tmp/tmp.iAZdigSV16 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BuDGbHBoao ++ cat /tmp/tmp.iAZdigSV16 ++ rm /tmp/tmp.BuDGbHBoao /tmp/tmp.iAZdigSV16 ++ return 0 + client_pod=pxc-client-6644d8898f-h6mz4 + wait_pod pxc-client-6644d8898f-h6mz4 + local pod=pxc-client-6644d8898f-h6mz4 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-h6mz4 ++ /usr/bin/sed -E 
's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-h6mz4 condition met pxc-client-6644d8898f-h6mz4.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 4 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lpSYq01vld +++ mktemp ++ local LAST_ERR=/tmp/tmp.H9iBRbasSN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lpSYq01vld ++ cat /tmp/tmp.H9iBRbasSN ++ rm /tmp/tmp.lpSYq01vld /tmp/tmp.H9iBRbasSN ++ return 0 + client_pod=pxc-client-6644d8898f-h6mz4 + wait_pod pxc-client-6644d8898f-h6mz4 + local pod=pxc-client-6644d8898f-h6mz4 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-h6mz4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-h6mz4 condition met pxc-client-6644d8898f-h6mz4.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 5 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6p21lknOUq +++ mktemp ++ local LAST_ERR=/tmp/tmp.T089E9vObI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6p21lknOUq ++ cat /tmp/tmp.T089E9vObI ++ rm /tmp/tmp.6p21lknOUq /tmp/tmp.T089E9vObI ++ return 0 + client_pod=pxc-client-6644d8898f-h6mz4 + wait_pod pxc-client-6644d8898f-h6mz4 + local pod=pxc-client-6644d8898f-h6mz4 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-h6mz4 ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-6644d8898f-h6mz4 condition met pxc-client-6644d8898f-h6mz4.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 6 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE 
user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eND74Hp388 +++ mktemp ++ local LAST_ERR=/tmp/tmp.pKYRjPyzs2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eND74Hp388 ++ cat /tmp/tmp.pKYRjPyzs2 ++ rm /tmp/tmp.eND74Hp388 /tmp/tmp.pKYRjPyzs2 ++ return 0 + client_pod=pxc-client-6644d8898f-h6mz4 + wait_pod pxc-client-6644d8898f-h6mz4 + local pod=pxc-client-6644d8898f-h6mz4 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-h6mz4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-h6mz4 condition met pxc-client-6644d8898f-h6mz4.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 7 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.znN3INIOjo +++ mktemp ++ local LAST_ERR=/tmp/tmp.DTmt3kYBQL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.znN3INIOjo ++ cat /tmp/tmp.DTmt3kYBQL ++ rm /tmp/tmp.znN3INIOjo /tmp/tmp.DTmt3kYBQL ++ return 0 + client_pod=pxc-client-6644d8898f-h6mz4 + wait_pod pxc-client-6644d8898f-h6mz4 + local pod=pxc-client-6644d8898f-h6mz4 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-h6mz4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-h6mz4 condition met pxc-client-6644d8898f-h6mz4.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 8 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UluaaVpVwY +++ mktemp ++ local LAST_ERR=/tmp/tmp.YWdqCc15bj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods 
--selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UluaaVpVwY ++ cat /tmp/tmp.YWdqCc15bj ++ rm /tmp/tmp.UluaaVpVwY /tmp/tmp.YWdqCc15bj ++ return 0 + client_pod=pxc-client-6644d8898f-h6mz4 + wait_pod pxc-client-6644d8898f-h6mz4 + local pod=pxc-client-6644d8898f-h6mz4 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-h6mz4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-h6mz4 condition met pxc-client-6644d8898f-h6mz4.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 9 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uqi94lGvZj +++ mktemp ++ local LAST_ERR=/tmp/tmp.3vzdOccH1c ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uqi94lGvZj ++ cat /tmp/tmp.3vzdOccH1c ++ rm /tmp/tmp.uqi94lGvZj /tmp/tmp.3vzdOccH1c ++ return 0 + client_pod=pxc-client-6644d8898f-h6mz4 + wait_pod pxc-client-6644d8898f-h6mz4 + local pod=pxc-client-6644d8898f-h6mz4 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-h6mz4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-h6mz4 condition met pxc-client-6644d8898f-h6mz4.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 10 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.d5VIk7vFZY +++ mktemp ++ local LAST_ERR=/tmp/tmp.NH136p5cZY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.d5VIk7vFZY ++ cat /tmp/tmp.NH136p5cZY ++ rm /tmp/tmp.d5VIk7vFZY /tmp/tmp.NH136p5cZY ++ return 0 + client_pod=pxc-client-6644d8898f-h6mz4 + wait_pod pxc-client-6644d8898f-h6mz4 + local pod=pxc-client-6644d8898f-h6mz4 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-h6mz4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace 
pod/pxc-client-6644d8898f-h6mz4 condition met pxc-client-6644d8898f-h6mz4.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 11 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.i94U4mMu6S +++ mktemp ++ local LAST_ERR=/tmp/tmp.ViLpEvookR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.i94U4mMu6S ++ cat /tmp/tmp.ViLpEvookR ++ rm /tmp/tmp.i94U4mMu6S /tmp/tmp.ViLpEvookR ++ return 0 + client_pod=pxc-client-6644d8898f-h6mz4 + wait_pod pxc-client-6644d8898f-h6mz4 + local pod=pxc-client-6644d8898f-h6mz4 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-h6mz4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-h6mz4 condition met pxc-client-6644d8898f-h6mz4.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 12 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jjNXvDHX44 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Is8ULd96eD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jjNXvDHX44 ++ cat /tmp/tmp.Is8ULd96eD ++ rm /tmp/tmp.jjNXvDHX44 /tmp/tmp.Is8ULd96eD ++ return 0 + client_pod=pxc-client-6644d8898f-h6mz4 + wait_pod pxc-client-6644d8898f-h6mz4 + local pod=pxc-client-6644d8898f-h6mz4 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-h6mz4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-h6mz4 condition met pxc-client-6644d8898f-h6mz4.Ok + set +o xtrace NULL + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc 
some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dELYrlFQnC +++ mktemp ++ local LAST_ERR=/tmp/tmp.4p3TkCyXDC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dELYrlFQnC ++ cat /tmp/tmp.4p3TkCyXDC ++ rm /tmp/tmp.dELYrlFQnC /tmp/tmp.4p3TkCyXDC ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Z8tsR3gV53 +++ mktemp ++ local LAST_ERR=/tmp/tmp.YJbCsnz8dt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Z8tsR3gV53 ++ cat /tmp/tmp.YJbCsnz8dt ++ rm /tmp/tmp.Z8tsR3gV53 /tmp/tmp.YJbCsnz8dt ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.wIfPDzYRI3 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.9oFoBC4H0B +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.wIfPDzYRI3 +++++ cat /tmp/tmp.9oFoBC4H0B +++++ rm /tmp/tmp.wIfPDzYRI3 /tmp/tmp.9oFoBC4H0B +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.tMweBp80Sh ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.lVny121rwO +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.tMweBp80Sh +++++ cat /tmp/tmp.lVny121rwO +++++ rm /tmp/tmp.tMweBp80Sh /tmp/tmp.lVny121rwO +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FqRQeXXgIB +++ mktemp ++ local LAST_ERR=/tmp/tmp.ea2X3wbIib ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FqRQeXXgIB ++ cat /tmp/tmp.ea2X3wbIib ++ rm /tmp/tmp.FqRQeXXgIB /tmp/tmp.ea2X3wbIib ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 
'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hCMnxbrd5o +++ mktemp ++ local LAST_ERR=/tmp/tmp.JQ7o0Zo4eG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hCMnxbrd5o ++ cat /tmp/tmp.JQ7o0Zo4eG ++ rm /tmp/tmp.hCMnxbrd5o /tmp/tmp.JQ7o0Zo4eG ++ return 0 + client_pod=pxc-client-6644d8898f-h6mz4 + wait_pod pxc-client-6644d8898f-h6mz4 + local pod=pxc-client-6644d8898f-h6mz4 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-h6mz4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-h6mz4 condition met pxc-client-6644d8898f-h6mz4.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.8X6qenpyRR/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.8X6qenpyRR/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.iqsSgbjsmi ++ mktemp + local LAST_ERR=/tmp/tmp.RLvISEgYMJ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.iqsSgbjsmi secret/my-cluster-secrets patched + cat /tmp/tmp.RLvISEgYMJ + rm /tmp/tmp.iqsSgbjsmi /tmp/tmp.RLvISEgYMJ + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bIESNUTEsZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.hHznBKBEeJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bIESNUTEsZ ++ cat /tmp/tmp.hHznBKBEeJ ++ rm /tmp/tmp.bIESNUTEsZ /tmp/tmp.hHznBKBEeJ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FUjgX5fBwu +++ mktemp ++ local LAST_ERR=/tmp/tmp.tGSBAnCSo4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FUjgX5fBwu ++ cat /tmp/tmp.tGSBAnCSo4 ++ rm /tmp/tmp.FUjgX5fBwu 
/tmp/tmp.tGSBAnCSo4 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ApsZCkex1X +++ mktemp ++ local LAST_ERR=/tmp/tmp.qGvjfSbNK2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ApsZCkex1X ++ cat /tmp/tmp.qGvjfSbNK2 ++ rm /tmp/tmp.ApsZCkex1X /tmp/tmp.qGvjfSbNK2 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.3WcU5pYISS ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.yu2vpombRH +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.3WcU5pYISS +++++ cat /tmp/tmp.yu2vpombRH +++++ rm /tmp/tmp.3WcU5pYISS /tmp/tmp.yu2vpombRH +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.wChJg7ncqc ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.BobeVV8uDr +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.wChJg7ncqc +++++ cat /tmp/tmp.BobeVV8uDr +++++ rm /tmp/tmp.wChJg7ncqc /tmp/tmp.BobeVV8uDr +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vrCsBUgnE1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.mqwQftBKm0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vrCsBUgnE1 ++ cat /tmp/tmp.mqwQftBKm0 ++ rm /tmp/tmp.vrCsBUgnE1 /tmp/tmp.mqwQftBKm0 ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MM70ibobXS +++ mktemp ++ local LAST_ERR=/tmp/tmp.DkaWKDkDOl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 
'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MM70ibobXS ++ cat /tmp/tmp.DkaWKDkDOl ++ rm /tmp/tmp.MM70ibobXS /tmp/tmp.DkaWKDkDOl ++ return 0 + client_pod=pxc-client-6644d8898f-h6mz4 + wait_pod pxc-client-6644d8898f-h6mz4 + local pod=pxc-client-6644d8898f-h6mz4 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-h6mz4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-h6mz4 condition met pxc-client-6644d8898f-h6mz4.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.8X6qenpyRR/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.8X6qenpyRR/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.31ZZvGX6y7 ++ mktemp + local LAST_ERR=/tmp/tmp.F3F23fWvEe + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.31ZZvGX6y7 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.F3F23fWvEe + rm /tmp/tmp.31ZZvGX6y7 /tmp/tmp.F3F23fWvEe + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Vh7ZGiJYa1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.D8p0UgU4Hx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Vh7ZGiJYa1 ++ cat /tmp/tmp.D8p0UgU4Hx ++ rm /tmp/tmp.Vh7ZGiJYa1 /tmp/tmp.D8p0UgU4Hx ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kJZqmv1UPk +++ mktemp ++ local LAST_ERR=/tmp/tmp.RJE4uIDowR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kJZqmv1UPk ++ cat /tmp/tmp.RJE4uIDowR ++ rm /tmp/tmp.kJZqmv1UPk /tmp/tmp.RJE4uIDowR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.m91EsYBRrr +++ mktemp ++ local LAST_ERR=/tmp/tmp.Z1MwT9RYUk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 
']' ++ break ++ cat /tmp/tmp.m91EsYBRrr ++ cat /tmp/tmp.Z1MwT9RYUk ++ rm /tmp/tmp.m91EsYBRrr /tmp/tmp.Z1MwT9RYUk ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dBO4zEJSM6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.fXCSo3QNno ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dBO4zEJSM6 ++ cat /tmp/tmp.fXCSo3QNno ++ rm /tmp/tmp.dBO4zEJSM6 /tmp/tmp.fXCSo3QNno ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qHeI9wZHJ0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.19jWKVgJrI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qHeI9wZHJ0 ++ cat /tmp/tmp.19jWKVgJrI ++ rm /tmp/tmp.qHeI9wZHJ0 /tmp/tmp.19jWKVgJrI ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yaYqC5TnYE +++ mktemp ++ local LAST_ERR=/tmp/tmp.Fj6KBSOJHL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yaYqC5TnYE ++ cat /tmp/tmp.Fj6KBSOJHL ++ rm /tmp/tmp.yaYqC5TnYE /tmp/tmp.Fj6KBSOJHL ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0MBlR1qcva +++ mktemp ++ local LAST_ERR=/tmp/tmp.TCyAv0GiWb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0MBlR1qcva ++ cat /tmp/tmp.TCyAv0GiWb ++ rm /tmp/tmp.0MBlR1qcva /tmp/tmp.TCyAv0GiWb ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZtrTRPemxT +++ mktemp ++ local LAST_ERR=/tmp/tmp.3yhZE5UiKI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZtrTRPemxT ++ cat /tmp/tmp.3yhZE5UiKI ++ rm /tmp/tmp.ZtrTRPemxT /tmp/tmp.3yhZE5UiKI ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 7 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JEyDwnCPeO +++ mktemp ++ local LAST_ERR=/tmp/tmp.rqWCWRk6Vy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 
'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JEyDwnCPeO ++ cat /tmp/tmp.rqWCWRk6Vy ++ rm /tmp/tmp.JEyDwnCPeO /tmp/tmp.rqWCWRk6Vy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 8 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Acy6UyMKYY +++ mktemp ++ local LAST_ERR=/tmp/tmp.uT6ZLb95XB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Acy6UyMKYY ++ cat /tmp/tmp.uT6ZLb95XB ++ rm /tmp/tmp.Acy6UyMKYY /tmp/tmp.uT6ZLb95XB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 9 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HTKWmUSaVq +++ mktemp ++ local LAST_ERR=/tmp/tmp.MRxm6tIgz2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HTKWmUSaVq ++ cat /tmp/tmp.MRxm6tIgz2 ++ rm /tmp/tmp.HTKWmUSaVq /tmp/tmp.MRxm6tIgz2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 10 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1vo8er8byK +++ mktemp ++ local LAST_ERR=/tmp/tmp.7Ke22K1dUw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1vo8er8byK ++ cat /tmp/tmp.7Ke22K1dUw ++ rm /tmp/tmp.1vo8er8byK /tmp/tmp.7Ke22K1dUw ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 11 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nBZHX5iPe1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.b6AiXC4VDm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nBZHX5iPe1 ++ cat /tmp/tmp.b6AiXC4VDm ++ rm /tmp/tmp.nBZHX5iPe1 /tmp/tmp.b6AiXC4VDm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 12 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qABlgFIoGX +++ mktemp ++ local LAST_ERR=/tmp/tmp.XnSTlqY7nW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qABlgFIoGX ++ cat /tmp/tmp.XnSTlqY7nW ++ rm /tmp/tmp.qABlgFIoGX /tmp/tmp.XnSTlqY7nW ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 13 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0zOPHpOpnQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.p3ft7ya2SQ ++ local exit_status=0 +++ seq 0 2 ++ for i in 
'$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0zOPHpOpnQ ++ cat /tmp/tmp.p3ft7ya2SQ ++ rm /tmp/tmp.0zOPHpOpnQ /tmp/tmp.p3ft7ya2SQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 14 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IHSOZuBPvA +++ mktemp ++ local LAST_ERR=/tmp/tmp.H0VTYHkJC2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IHSOZuBPvA ++ cat /tmp/tmp.H0VTYHkJC2 ++ rm /tmp/tmp.IHSOZuBPvA /tmp/tmp.H0VTYHkJC2 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6w1f7pKO99 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Vna3g5Ty2w ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6w1f7pKO99 ++ cat /tmp/tmp.Vna3g5Ty2w ++ rm /tmp/tmp.6w1f7pKO99 /tmp/tmp.Vna3g5Ty2w ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.H9nkawdhyF ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.n075tXtHFL +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.H9nkawdhyF +++++ cat /tmp/tmp.n075tXtHFL +++++ rm /tmp/tmp.H9nkawdhyF /tmp/tmp.n075tXtHFL +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.1oyWOVUiv7 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.czyeyDgbJG +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.1oyWOVUiv7 +++++ cat /tmp/tmp.czyeyDgbJG +++++ rm /tmp/tmp.1oyWOVUiv7 /tmp/tmp.czyeyDgbJG +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bjSK2Rw4en +++ mktemp ++ local LAST_ERR=/tmp/tmp.1aTxiveTGP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bjSK2Rw4en ++ cat /tmp/tmp.1aTxiveTGP ++ rm /tmp/tmp.bjSK2Rw4en /tmp/tmp.1aTxiveTGP ++ return 0 + [[ 2 == \2 ]] + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret 
my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Su7eFYeQj6 ++ mktemp + local LAST_ERR=/tmp/tmp.TgbLQzuded + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Su7eFYeQj6 secret/my-cluster-secrets-2 patched + cat /tmp/tmp.TgbLQzuded + rm /tmp/tmp.Su7eFYeQj6 /tmp/tmp.TgbLQzuded + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vgp0X7kO2D +++ mktemp ++ local LAST_ERR=/tmp/tmp.zScRHr3TjD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vgp0X7kO2D ++ cat /tmp/tmp.zScRHr3TjD ++ rm /tmp/tmp.vgp0X7kO2D /tmp/tmp.zScRHr3TjD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RcoO8GBwHD +++ mktemp ++ local LAST_ERR=/tmp/tmp.U9euS4bOdI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RcoO8GBwHD ++ cat /tmp/tmp.U9euS4bOdI ++ rm /tmp/tmp.RcoO8GBwHD /tmp/tmp.U9euS4bOdI ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8CpuvrwsfF +++ mktemp ++ local LAST_ERR=/tmp/tmp.T4k5YngsQl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8CpuvrwsfF ++ cat /tmp/tmp.T4k5YngsQl ++ rm /tmp/tmp.8CpuvrwsfF /tmp/tmp.T4k5YngsQl ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FsGgTBmLhR +++ mktemp ++ local LAST_ERR=/tmp/tmp.t6zwq0qLsl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FsGgTBmLhR ++ cat /tmp/tmp.t6zwq0qLsl ++ rm /tmp/tmp.FsGgTBmLhR /tmp/tmp.t6zwq0qLsl ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.zyZHNkun75 ++++++ mktemp 
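-----------------------------------------------------------------------------------
editor's note: password rotation in this test is a one-key Secret patch. Despite
the variable name newpassencrypted, the value is only base64-encoded (standard
Kubernetes Secret data), not encrypted. The commands below are lifted from the
trace; the trailing comment about the operator's reaction is an inference from
the "initializing" -> "ready" transition that follows, not something the log
states directly.

# Rotate the operator user's password (from the trace):
newpass=test-password2
newpassencrypted=$(echo -n "$newpass" | base64)   # dGVzdC1wYXNzd29yZDI=
kubectl patch secret my-cluster-secrets-2 \
    -p="{\"data\":{\"operator\": \"$newpassencrypted\"}}"
# The operator picks up the Secret change and propagates the new password into
# the cluster; the state cycles through "initializing" back to "ready", and the
# test then re-runs SHOW TABLES as -uoperator -p'test-password2' to verify.
-----------------------------------------------------------------------------------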
+++++ local LAST_ERR=/tmp/tmp.4GwQxLuawb +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.zyZHNkun75 +++++ cat /tmp/tmp.4GwQxLuawb +++++ rm /tmp/tmp.zyZHNkun75 /tmp/tmp.4GwQxLuawb +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.B0PGAlvjkK ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.wsMHJwxat6 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.B0PGAlvjkK +++++ cat /tmp/tmp.wsMHJwxat6 +++++ rm /tmp/tmp.B0PGAlvjkK /tmp/tmp.wsMHJwxat6 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nDXbtLy284 +++ mktemp ++ local LAST_ERR=/tmp/tmp.rVpX3EFLIe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nDXbtLy284 ++ cat /tmp/tmp.rVpX3EFLIe ++ rm /tmp/tmp.nDXbtLy284 /tmp/tmp.rVpX3EFLIe ++ return 0 + [[ 2 == \2 ]] + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mktQPc7vbk +++ mktemp ++ local LAST_ERR=/tmp/tmp.CNTdGv10gK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mktQPc7vbk ++ cat /tmp/tmp.CNTdGv10gK ++ rm /tmp/tmp.mktQPc7vbk /tmp/tmp.CNTdGv10gK ++ return 0 + client_pod=pxc-client-6644d8898f-h6mz4 + wait_pod pxc-client-6644d8898f-h6mz4 + local pod=pxc-client-6644d8898f-h6mz4 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-h6mz4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-h6mz4 condition met pxc-client-6644d8898f-h6mz4.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.8X6qenpyRR/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.8X6qenpyRR/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.EjeRxCWHdS +++ mktemp ++ local LAST_ERR=/tmp/tmp.3v79O0z5L4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EjeRxCWHdS ++ cat /tmp/tmp.3v79O0z5L4 ++ rm /tmp/tmp.EjeRxCWHdS /tmp/tmp.3v79O0z5L4 ++ return 0 + newpass='f15J-K4B8kv3S%>roM' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''f15J-K4B8kv3S%>roM'\'';' '-h some-name-pxc -uroot -p'\''f15J-K4B8kv3S%>roM'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''f15J-K4B8kv3S%>roM'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''f15J-K4B8kv3S%>roM'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.T0FEGp3CHy +++ mktemp ++ local LAST_ERR=/tmp/tmp.9d9Pb4jwWP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.T0FEGp3CHy ++ cat /tmp/tmp.9d9Pb4jwWP ++ rm /tmp/tmp.T0FEGp3CHy /tmp/tmp.9d9Pb4jwWP ++ return 0 + client_pod=pxc-client-6644d8898f-h6mz4 + wait_pod pxc-client-6644d8898f-h6mz4 + local pod=pxc-client-6644d8898f-h6mz4 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-h6mz4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-h6mz4 condition met pxc-client-6644d8898f-h6mz4.Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''f15J-K4B8kv3S%>roM'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''f15J-K4B8kv3S%>roM'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''f15J-K4B8kv3S%>roM'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''f15J-K4B8kv3S%>roM'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rZMi0wdABU +++ mktemp ++ local LAST_ERR=/tmp/tmp.f9SAe5U9S5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 
0 ']' ++ break ++ cat /tmp/tmp.rZMi0wdABU ++ cat /tmp/tmp.f9SAe5U9S5 ++ rm /tmp/tmp.rZMi0wdABU /tmp/tmp.f9SAe5U9S5 ++ return 0 + client_pod=pxc-client-6644d8898f-h6mz4 + wait_pod pxc-client-6644d8898f-h6mz4 + local pod=pxc-client-6644d8898f-h6mz4 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-h6mz4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-h6mz4 condition met pxc-client-6644d8898f-h6mz4.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.8X6qenpyRR/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.8X6qenpyRR/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.CTBfXMM6h9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.nPdhlrxJDV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CTBfXMM6h9 ++ cat /tmp/tmp.nPdhlrxJDV ++ rm /tmp/tmp.CTBfXMM6h9 /tmp/tmp.nPdhlrxJDV ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.KHsdrpiQ04 ++ mktemp + local LAST_ERR=/tmp/tmp.syMP6A3VC9 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.KHsdrpiQ04 secret/my-cluster-secrets-2 configured + cat /tmp/tmp.syMP6A3VC9 Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
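For reference, the secret read/patch round trip exercised throughout this test reduces to two kubectl calls. A minimal sketch, reusing the same secret and key names as above (the value test-password2 encodes to dGVzdC1wYXNzd29yZDI=):

# read one key of the Secret; values under .data are stored base64-encoded
kubectl get secret/my-cluster-secrets-2 --template='{{.data.operator}}' | base64 --decode

# write a new value for the same key; it must be base64-encoded on the way in
kubectl patch secret my-cluster-secrets-2 -p '{"data":{"operator":"'"$(echo -n test-password2 | base64)"'"}}'

The operator watches these Secrets, so a patch like the second call is what triggers the password rotation and the "wait cluster consistency" polling recorded throughout this log.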
+ rm /tmp/tmp.KHsdrpiQ04 /tmp/tmp.syMP6A3VC9 + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UibOYjy40A +++ mktemp ++ local LAST_ERR=/tmp/tmp.ww1bLXN8Vl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UibOYjy40A ++ cat /tmp/tmp.ww1bLXN8Vl ++ rm /tmp/tmp.UibOYjy40A /tmp/tmp.ww1bLXN8Vl ++ return 0 + client_pod=pxc-client-6644d8898f-h6mz4 + wait_pod pxc-client-6644d8898f-h6mz4 + local pod=pxc-client-6644d8898f-h6mz4 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-h6mz4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-h6mz4 condition met pxc-client-6644d8898f-h6mz4.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.8X6qenpyRR/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.8X6qenpyRR/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + local LAST_OUT=/tmp/tmp.h3aumW6Cf0 + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' ++ mktemp + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1710-fb797906#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-16979~ + local LAST_ERR=/tmp/tmp.maCuJCJJJp + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.h3aumW6Cf0 perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.maCuJCJJJp + rm /tmp/tmp.h3aumW6Cf0 /tmp/tmp.maCuJCJJJp + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3zLyePrbZt +++ mktemp ++ local LAST_ERR=/tmp/tmp.35CNu359c4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3zLyePrbZt ++ cat /tmp/tmp.35CNu359c4 ++ rm /tmp/tmp.3zLyePrbZt /tmp/tmp.35CNu359c4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zD7yAHh7hV +++ mktemp ++ local LAST_ERR=/tmp/tmp.l5wQSXcsQg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zD7yAHh7hV ++ cat /tmp/tmp.l5wQSXcsQg ++ rm /tmp/tmp.zD7yAHh7hV /tmp/tmp.l5wQSXcsQg ++ return 0 + [[ 
initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PiRUyvsBvE +++ mktemp ++ local LAST_ERR=/tmp/tmp.YE6BwILFsz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PiRUyvsBvE ++ cat /tmp/tmp.YE6BwILFsz ++ rm /tmp/tmp.PiRUyvsBvE /tmp/tmp.YE6BwILFsz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PbOSpmM5UG +++ mktemp ++ local LAST_ERR=/tmp/tmp.8hiUTeSwHp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PbOSpmM5UG ++ cat /tmp/tmp.8hiUTeSwHp ++ rm /tmp/tmp.PbOSpmM5UG /tmp/tmp.8hiUTeSwHp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JH8eY73Csj +++ mktemp ++ local LAST_ERR=/tmp/tmp.pmHvPdIa4K ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JH8eY73Csj ++ cat /tmp/tmp.pmHvPdIa4K ++ rm /tmp/tmp.JH8eY73Csj /tmp/tmp.pmHvPdIa4K ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.G2cM0VCk9R +++ mktemp ++ local LAST_ERR=/tmp/tmp.92GBJ3DidQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.G2cM0VCk9R ++ cat /tmp/tmp.92GBJ3DidQ ++ rm /tmp/tmp.G2cM0VCk9R /tmp/tmp.92GBJ3DidQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PHSNuBJhcT +++ mktemp ++ local LAST_ERR=/tmp/tmp.t8Vsv4lqtD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PHSNuBJhcT ++ cat /tmp/tmp.t8Vsv4lqtD ++ rm /tmp/tmp.PHSNuBJhcT /tmp/tmp.t8Vsv4lqtD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bvyQJPTRZx +++ mktemp ++ local LAST_ERR=/tmp/tmp.J4uTqXcUXU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bvyQJPTRZx ++ cat /tmp/tmp.J4uTqXcUXU ++ rm 
/tmp/tmp.bvyQJPTRZx /tmp/tmp.J4uTqXcUXU ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 7 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eS73Cv2GDV +++ mktemp ++ local LAST_ERR=/tmp/tmp.N7TetSXurk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eS73Cv2GDV ++ cat /tmp/tmp.N7TetSXurk ++ rm /tmp/tmp.eS73Cv2GDV /tmp/tmp.N7TetSXurk ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 8 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pQ45u38gnK +++ mktemp ++ local LAST_ERR=/tmp/tmp.47rykoo5Am ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pQ45u38gnK ++ cat /tmp/tmp.47rykoo5Am ++ rm /tmp/tmp.pQ45u38gnK /tmp/tmp.47rykoo5Am ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 9 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TMIbr9vZfs +++ mktemp ++ local LAST_ERR=/tmp/tmp.GnzRelmsYB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TMIbr9vZfs ++ cat /tmp/tmp.GnzRelmsYB ++ rm /tmp/tmp.TMIbr9vZfs /tmp/tmp.GnzRelmsYB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 10 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DpgQ279CMv +++ mktemp ++ local LAST_ERR=/tmp/tmp.ANbTaG7SaX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DpgQ279CMv ++ cat /tmp/tmp.ANbTaG7SaX ++ rm /tmp/tmp.DpgQ279CMv /tmp/tmp.ANbTaG7SaX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 11 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pD6Dkca5pp +++ mktemp ++ local LAST_ERR=/tmp/tmp.AadUy38qiO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pD6Dkca5pp ++ cat /tmp/tmp.AadUy38qiO ++ rm /tmp/tmp.pD6Dkca5pp /tmp/tmp.AadUy38qiO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 12 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1HEAOJ1zYt +++ mktemp ++ local LAST_ERR=/tmp/tmp.xfp2370HQ9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ 
cat /tmp/tmp.1HEAOJ1zYt ++ cat /tmp/tmp.xfp2370HQ9 ++ rm /tmp/tmp.1HEAOJ1zYt /tmp/tmp.xfp2370HQ9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 13 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.df9uKK2Sii +++ mktemp ++ local LAST_ERR=/tmp/tmp.xfZ51Mps5n ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.df9uKK2Sii ++ cat /tmp/tmp.xfZ51Mps5n ++ rm /tmp/tmp.df9uKK2Sii /tmp/tmp.xfZ51Mps5n ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2ri04D462n +++ mktemp ++ local LAST_ERR=/tmp/tmp.PTR69O9zeE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2ri04D462n ++ cat /tmp/tmp.PTR69O9zeE ++ rm /tmp/tmp.2ri04D462n /tmp/tmp.PTR69O9zeE ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.APrI3VunmH ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.sz2hWwB7Ev +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.APrI3VunmH +++++ cat /tmp/tmp.sz2hWwB7Ev +++++ rm /tmp/tmp.APrI3VunmH /tmp/tmp.sz2hWwB7Ev +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uCsbDZCmg0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.spziGYHa34 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uCsbDZCmg0 ++ cat /tmp/tmp.spziGYHa34 ++ rm /tmp/tmp.uCsbDZCmg0 /tmp/tmp.spziGYHa34 ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.0d7xwVguZI ++ mktemp + local LAST_ERR=/tmp/tmp.UcoeCWm9tV + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.0d7xwVguZI secret/my-cluster-secrets patched + cat /tmp/tmp.UcoeCWm9tV + rm /tmp/tmp.0d7xwVguZI /tmp/tmp.UcoeCWm9tV + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency 
----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HOchcFeBVe +++ mktemp ++ local LAST_ERR=/tmp/tmp.IuIxgKRkhP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HOchcFeBVe ++ cat /tmp/tmp.IuIxgKRkhP ++ rm /tmp/tmp.HOchcFeBVe /tmp/tmp.IuIxgKRkhP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.W6NriGy3J0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.yQ1CVbPntH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.W6NriGy3J0 ++ cat /tmp/tmp.yQ1CVbPntH ++ rm /tmp/tmp.W6NriGy3J0 /tmp/tmp.yQ1CVbPntH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kk35lV4Ad3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.WtTLocXoAp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kk35lV4Ad3 ++ cat /tmp/tmp.WtTLocXoAp ++ rm /tmp/tmp.kk35lV4Ad3 /tmp/tmp.WtTLocXoAp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SSXnAVi1VE +++ mktemp ++ local LAST_ERR=/tmp/tmp.iEPKXyZsjj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SSXnAVi1VE ++ cat /tmp/tmp.iEPKXyZsjj ++ rm /tmp/tmp.SSXnAVi1VE /tmp/tmp.iEPKXyZsjj ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bghbdkX0wJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.STqiOWs9OF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bghbdkX0wJ ++ cat /tmp/tmp.STqiOWs9OF ++ rm /tmp/tmp.bghbdkX0wJ /tmp/tmp.STqiOWs9OF ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.ZsS2R2g17t ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.bp5m4zyPoh +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.ZsS2R2g17t +++++ cat /tmp/tmp.bp5m4zyPoh +++++ rm /tmp/tmp.ZsS2R2g17t /tmp/tmp.bp5m4zyPoh +++++ return 0 ++++ [[ 
true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mhvDUz8RG0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ejIdp5MJnL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mhvDUz8RG0 ++ cat /tmp/tmp.ejIdp5MJnL ++ rm /tmp/tmp.mhvDUz8RG0 /tmp/tmp.ejIdp5MJnL ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-3-80.sql ']' + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cqnbokFZR8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ud6VYX2Wpy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cqnbokFZR8 ++ cat /tmp/tmp.ud6VYX2Wpy ++ rm /tmp/tmp.cqnbokFZR8 /tmp/tmp.ud6VYX2Wpy ++ return 0 + client_pod=pxc-client-6644d8898f-h6mz4 + wait_pod pxc-client-6644d8898f-h6mz4 + local pod=pxc-client-6644d8898f-h6mz4 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-h6mz4 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-h6mz4 condition met pxc-client-6644d8898f-h6mz4.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.8X6qenpyRR/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1710/e2e-tests/users/compare/select-3.sql /tmp/tmp.8X6qenpyRR/select-3.sql + destroy users-16979 + local namespace=users-16979 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v 'the object has been modified' ++ get_operator_pod + grep -v 'get backup status: Job.batch' ++ local label_prefix=app.kubernetes.io/ + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + grep -v level=info + sort -u + tee /tmp/tmp.8X6qenpyRR/operator.log +++ grep -c percona-xtradb-cluster-operator +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.fPuaEG4iVk +++ mktemp ++ local LAST_ERR=/tmp/tmp.0Lr2JClYJq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fPuaEG4iVk ++ cat /tmp/tmp.0Lr2JClYJq ++ rm /tmp/tmp.fPuaEG4iVk /tmp/tmp.0Lr2JClYJq ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-8448ddf579-96dlf ++ mktemp + local LAST_OUT=/tmp/tmp.b8Q1rPbjln ++ mktemp + local LAST_ERR=/tmp/tmp.8idXnLkMTF + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-8448ddf579-96dlf + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.b8Q1rPbjln + cat /tmp/tmp.8idXnLkMTF + rm /tmp/tmp.b8Q1rPbjln /tmp/tmp.8idXnLkMTF + return 0 2024-05-14T17:38:34.854Z INFO setup Manager starting up {"gitCommit": "fb797906018592e5c770209e9ccb265e596c46c7", "gitBranch": "PR-1710-fb797906", "buildTime": "2024-05-14T15:32:24Z", "goVersion": "go1.22.3", "os": "linux", "arch": "amd64"} 2024-05-14T17:38:34.854Z INFO setup Registering Components. 2024-05-14T17:38:34.854Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.26.15-gke.1300000"} 2024-05-14T17:38:36.534Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2024-05-14T17:38:36.537Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2024-05-14T17:38:36.537Z INFO controller-runtime.metrics Starting metrics server 2024-05-14T17:38:36.537Z INFO controller-runtime.webhook Starting webhook server 2024-05-14T17:38:36.537Z INFO setup Starting the Cmd. 2024-05-14T17:38:36.537Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2024-05-14T17:38:36.538Z INFO controller-runtime.certwatcher Starting certificate watcher 2024-05-14T17:38:36.538Z INFO controller-runtime.certwatcher Updated current TLS certificate 2024-05-14T17:38:36.538Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2024-05-14T17:38:36.639Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 
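These operator log entries have already been run through the filter pipeline assembled at the start of the destroy step above. A minimal sketch of the equivalent one-liner, assuming operator_pod holds the pod name resolved above (percona-xtradb-cluster-operator-8448ddf579-96dlf):

kubectl logs -n pxc-operator "$operator_pod" \
  | grep -v 'the object has been modified' \
  | grep -v 'get backup status: Job.batch' \
  | grep -v level=info \
  | /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' \
  | sort -u \
  | tee /tmp/tmp.8X6qenpyRR/operator.log

sort -u deduplicates repeated records and leaves the remaining entries in lexicographic (here, timestamp) order rather than raw emission order.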
2024-05-14T17:38:36.664Z DEBUG events percona-xtradb-cluster-operator-8448ddf579-96dlf_e47a3f72-db18-47c3-b351-e9893913e55e became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"78f45816-6207-4b90-9c7b-ecfb0592295d","apiVersion":"coordination.k8s.io/v1","resourceVersion":"67796"}, "reason": "LeaderElection"} 2024-05-14T17:38:36.664Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: unknown type"} 2024-05-14T17:38:36.664Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: unknown type"} 2024-05-14T17:38:36.664Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2024-05-14T17:38:36.665Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2024-05-14T17:38:36.665Z INFO Starting Controller {"controller": "pxc-controller"} 2024-05-14T17:38:36.665Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2024-05-14T17:38:36.665Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: unknown type"} 2024-05-14T17:38:36.769Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2024-05-14T17:38:36.769Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2024-05-14T17:38:36.775Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2024-05-14T17:39:08.240Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "e26417a5-e94e-42d8-a834-c0f390803f62", "version": "1.15.0"} 2024-05-14T17:40:23.700Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "3ad38d20-ab31-4571-9bd7-2652ebff15e9", "user": "operator"} 2024-05-14T17:40:23.737Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "3ad38d20-ab31-4571-9bd7-2652ebff15e9", "user": "monitor"} 2024-05-14T17:40:23.837Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "3ad38d20-ab31-4571-9bd7-2652ebff15e9"} 2024-05-14T17:40:23.876Z INFO monitor user privileges granted {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "3ad38d20-ab31-4571-9bd7-2652ebff15e9"} 2024-05-14T17:40:23.922Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "3ad38d20-ab31-4571-9bd7-2652ebff15e9", "user": "xtrabackup"} 2024-05-14T17:40:23.986Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "3ad38d20-ab31-4571-9bd7-2652ebff15e9"} 2024-05-14T17:40:24.022Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "3ad38d20-ab31-4571-9bd7-2652ebff15e9", "user": "replication"} 2024-05-14T17:40:24.177Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "3ad38d20-ab31-4571-9bd7-2652ebff15e9", "err": "get primary pxc pod: not found"} 2024-05-14T17:40:28.829Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "9fbb5200-7821-4f92-93ae-f08766816abd", 
"err": "get primary pxc pod: not found"} 2024-05-14T17:40:34.132Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "0932f7bb-0a27-4ec0-8514-3883d1c698be", "err": "get primary pxc pod: not found"} 2024-05-14T17:40:39.391Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "dd60e2f0-24b9-4e5c-a063-a00b2b16e3c0", "err": "get primary pxc pod: not found"} 2024-05-14T17:42:54.442Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1f7200e5-90c0-4a63-96f9-0d6c6f1348e1", "user": "root"} 2024-05-14T17:42:54.903Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1f7200e5-90c0-4a63-96f9-0d6c6f1348e1", "new version": "8.0.36-28.1"} 2024-05-14T17:42:58.006Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1f7200e5-90c0-4a63-96f9-0d6c6f1348e1"} 2024-05-14T17:43:02.681Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "788261de-6005-460b-bfeb-08f479975de3"} 2024-05-14T17:43:08.341Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "65a4f04a-d99e-4b4c-8607-3df3fdb6ccb7"} 2024-05-14T17:43:14.441Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "e9754c35-c7a4-4bd5-9439-938b77906f27"} 2024-05-14T17:43:19.050Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1faf430f-972a-47a7-8aae-ae12f9c74a69"} 2024-05-14T17:43:24.468Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "13778454-f24e-41b0-a3b3-3923c76bf0e3"} 2024-05-14T17:43:29.847Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "eb79fad3-a3a0-43eb-971c-1a8cdcdbc25c"} 2024-05-14T17:43:35.333Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "9d40f936-1c0f-4268-8b9e-b4a3b09ab273"} 2024-05-14T17:43:40.564Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "4335f580-eeb4-40f7-8df8-0e93bf4367af"} 2024-05-14T17:43:45.939Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "29ae4779-245d-4bd3-a989-dfa0feba7150"} 2024-05-14T17:43:51.456Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "824207d7-9ad4-4559-bf1c-ebf49307c52e"} 2024-05-14T17:43:56.848Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "7e278c62-33e1-4573-b365-d43b3e9a4a32"} 2024-05-14T17:43:58.920Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "a9a89611-ca9c-4cb3-9cac-5ee0412ed887", 
"user": "root"} 2024-05-14T17:43:58.963Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "a9a89611-ca9c-4cb3-9cac-5ee0412ed887", "user": "root"} 2024-05-14T17:43:58.999Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "a9a89611-ca9c-4cb3-9cac-5ee0412ed887", "secret": "some-name-mysql-init", "user": "root"} 2024-05-14T17:44:04.594Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "a9a89611-ca9c-4cb3-9cac-5ee0412ed887"} 2024-05-14T17:44:04.610Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "a9a89611-ca9c-4cb3-9cac-5ee0412ed887", "user": "root"} 2024-05-14T17:44:04.661Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "a9a89611-ca9c-4cb3-9cac-5ee0412ed887", "user": "root"} 2024-05-14T17:44:08.243Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "a9a89611-ca9c-4cb3-9cac-5ee0412ed887"} 2024-05-14T17:44:14.245Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "eff9e2be-2313-405f-8d63-790a46bd3fb0"} 2024-05-14T17:44:18.693Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "c8cee389-a59c-4859-bb72-cf113f8e5488", "error": "exec syncusers: pod some-name-proxysql-2 does not have a host assigned / / ", "errorVerbose": "exec syncusers: pod some-name-proxysql-2 does not have a host assigned / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-14T17:44:35.699Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "a8203cf8-4c20-4300-8624-474e35748c4c", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-14T17:44:40.672Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": 
"9053f32b-3041-43d7-b687-9d23bcfb3789", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-14T17:44:42.231Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "7d47915c-c14d-48b4-ac41-c00a30f5f40b", "user": "proxyadmin"} 2024-05-14T17:44:42.231Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "7d47915c-c14d-48b4-ac41-c00a30f5f40b", "user": "proxyadmin"} 2024-05-14T17:44:42.298Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "7d47915c-c14d-48b4-ac41-c00a30f5f40b", "user": "proxyadmin"} 2024-05-14T17:44:42.367Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "7d47915c-c14d-48b4-ac41-c00a30f5f40b", "user": "proxyadmin"} 2024-05-14T17:44:42.367Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "7d47915c-c14d-48b4-ac41-c00a30f5f40b", "last-applied-secret": "1a55867d0f4fa0d0643653b2eb57b2f690315d8d843a10599c9839c2a779a95f"} 2024-05-14T17:44:42.795Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "7d47915c-c14d-48b4-ac41-c00a30f5f40b", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-14T17:45:04.504Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "62310691-b784-4f97-8d92-5f1e32f44040", "err": "get primary pxc pod: not found"} 2024-05-14T17:45:14.774Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "5b8ec7e7-8178-45ed-8ff2-0b8448f09ec6", "err": "get primary pxc pod: not found"} 2024-05-14T17:45:33.262Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "11b2d3ea-5b52-4aff-b050-02067e752e6c"} 2024-05-14T17:45:38.186Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "fa827c50-99c3-4104-b4fb-23b761d184c7"} 2024-05-14T17:45:43.486Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "8562e32a-da8e-448c-8f12-b8be1003de9d"} 2024-05-14T17:45:49.155Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "961eb20f-da79-4b0b-a243-36e36ca22bea"} 2024-05-14T17:45:52.924Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "b169b2e0-90b5-4884-aac7-963f9968165c", "error": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-14T17:45:58.679Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "eb980bc3-bc5b-48a3-b09b-3802adc0df3e", "user": "xtrabackup"} 2024-05-14T17:45:58.705Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "eb980bc3-bc5b-48a3-b09b-3802adc0df3e", "user": "xtrabackup"} 2024-05-14T17:45:58.722Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "eb980bc3-bc5b-48a3-b09b-3802adc0df3e", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-05-14T17:45:58.747Z INFO Internal secrets updated 
{"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "eb980bc3-bc5b-48a3-b09b-3802adc0df3e", "user": "xtrabackup"} 2024-05-14T17:45:58.773Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "eb980bc3-bc5b-48a3-b09b-3802adc0df3e", "user": "xtrabackup"} 2024-05-14T17:45:58.782Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "eb980bc3-bc5b-48a3-b09b-3802adc0df3e", "last-applied-secret": "d11bcc70ca53ea410cd455e13d82e7225917b0861d9b677ed34c02c5f6fbaf1b"} 2024-05-14T17:46:03.243Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "eb980bc3-bc5b-48a3-b09b-3802adc0df3e"} 2024-05-14T17:46:07.635Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "d226ac6d-73d1-465a-902e-a648ee9c6711"} 2024-05-14T17:47:03.063Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "50628f73-1d0b-4700-ad04-608e67af39b4", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-16979 on 10.157.64.10:53: no such host"} 2024-05-14T17:47:08.388Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "c01c9aa7-0a71-4d3b-9b08-6b62994a0d88", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-16979 on 10.157.64.10:53: no such host"} 2024-05-14T17:47:56.172Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "4ef3611b-9837-40ab-8a0a-e4024099721d", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.109.88.64:33062: connect: connection refused"} 2024-05-14T17:48:01.459Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "64951a1e-a314-4f4d-bdbb-a8b6f5e15df8", "primary name": "some-name-pxc-0.some-name-pxc.users-16979.svc.cluster.local"} 2024-05-14T17:48:06.952Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "de4ecfaa-7f54-4ef1-8cdf-da452a5e42dc", "primary name": "some-name-pxc-0.some-name-pxc.users-16979.svc.cluster.local"} 2024-05-14T17:48:12.940Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "06c6462f-8e01-4f03-8bd3-18a31f9e1bbe", "primary name": "some-name-pxc-0.some-name-pxc.users-16979.svc.cluster.local"} 2024-05-14T17:48:37.793Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "a33e5aa7-62bc-467f-b1a3-91cab67bf249"} 2024-05-14T17:48:42.761Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "42f6ea31-8236-47d1-bc17-66ac240f8982"} 2024-05-14T17:48:48.391Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "19d3866a-f623-4585-b03b-f5e42208233f"} 2024-05-14T17:48:50.441Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "071c8674-87d9-4101-9931-836aee952aff", "user": "monitor"} 2024-05-14T17:48:50.471Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "071c8674-87d9-4101-9931-836aee952aff", "user": "monitor"} 2024-05-14T17:48:50.494Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "071c8674-87d9-4101-9931-836aee952aff", "secret": "some-name-mysql-init", "user": "monitor"} 2024-05-14T17:48:50.546Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "071c8674-87d9-4101-9931-836aee952aff", "user": "monitor"} 2024-05-14T17:48:50.561Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "071c8674-87d9-4101-9931-836aee952aff", "user": "monitor"} 2024-05-14T17:48:50.665Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "071c8674-87d9-4101-9931-836aee952aff", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"} 2024-05-14T17:48:53.940Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "071c8674-87d9-4101-9931-836aee952aff", "error": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-14T17:49:24.007Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "a38298f3-14a3-4515-8164-6ee2c153177b", "user": "monitor"} 2024-05-14T17:49:26.381Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "a38298f3-14a3-4515-8164-6ee2c153177b", "error": "exec syncusers: 
command terminated with exit code 1 / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-16979.svc.cluster.local:3306) to ProxySQL\nRemoving existing user from ProxySQL: monitor\nAdding user to ProxySQL: monitor\n / ERROR 1045 (28000) at line 1: ProxySQL Admin Error: UNIQUE constraint failed: mysql_users.username, mysql_users.frontend\nERROR (line:715) : Failed to add the user (monitor) from PXC to ProxySQL database. \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-16979.svc.cluster.local:3306) to ProxySQL\nRemoving existing user from ProxySQL: monitor\nAdding user to ProxySQL: monitor\n / ERROR 1045 (28000) at line 1: ProxySQL Admin Error: UNIQUE constraint failed: mysql_users.username, mysql_users.frontend\nERROR (line:715) : Failed to add the user (monitor) from PXC to ProxySQL database. \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-14T17:49:29.012Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "ad3e9987-c4ce-466c-9eab-7bf3f69bcc30", "user": "monitor"} 2024-05-14T17:49:34.472Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "456f2b27-c509-4558-b415-b77e3fb67ad3", "user": "monitor"} 2024-05-14T17:49:34.544Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "ad3e9987-c4ce-466c-9eab-7bf3f69bcc30"} 2024-05-14T17:49:37.972Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "456f2b27-c509-4558-b415-b77e3fb67ad3"} 2024-05-14T17:49:39.905Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "9f3a0f9e-7ca0-4fca-a9ae-e1defcf0b775", "user": "monitor"} 2024-05-14T17:49:43.458Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "9f3a0f9e-7ca0-4fca-a9ae-e1defcf0b775"} 2024-05-14T17:49:45.415Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "d5b9e8a4-e5c6-4153-9eaf-252f61ebb7b4", "user": "monitor"} 2024-05-14T17:49:49.250Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "d5b9e8a4-e5c6-4153-9eaf-252f61ebb7b4"} 2024-05-14T17:49:51.359Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "3281b939-f883-4bf1-98db-55aafdd5e87e", "user": "monitor"} 2024-05-14T17:49:54.880Z DEBUG PXC users 
2024-05-14T17:49:29.012Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "ad3e9987-c4ce-466c-9eab-7bf3f69bcc30", "user": "monitor"}
2024-05-14T17:49:34.472Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "456f2b27-c509-4558-b415-b77e3fb67ad3", "user": "monitor"}
2024-05-14T17:49:34.544Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "ad3e9987-c4ce-466c-9eab-7bf3f69bcc30"}
2024-05-14T17:49:37.972Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "456f2b27-c509-4558-b415-b77e3fb67ad3"}
2024-05-14T17:49:39.905Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "9f3a0f9e-7ca0-4fca-a9ae-e1defcf0b775", "user": "monitor"}
2024-05-14T17:49:43.458Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "9f3a0f9e-7ca0-4fca-a9ae-e1defcf0b775"}
2024-05-14T17:49:45.415Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "d5b9e8a4-e5c6-4153-9eaf-252f61ebb7b4", "user": "monitor"}
2024-05-14T17:49:49.250Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "d5b9e8a4-e5c6-4153-9eaf-252f61ebb7b4"}
2024-05-14T17:49:51.359Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "3281b939-f883-4bf1-98db-55aafdd5e87e", "user": "monitor"}
2024-05-14T17:49:54.880Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "3281b939-f883-4bf1-98db-55aafdd5e87e"}
2024-05-14T17:49:56.845Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "d3c05e36-acf4-4737-aa1b-1443f92dd457", "user": "monitor"}
2024-05-14T17:49:58.426Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "d3c05e36-acf4-4737-aa1b-1443f92dd457", "user": "monitor"}
2024-05-14T17:49:58.448Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "d3c05e36-acf4-4737-aa1b-1443f92dd457", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"}
2024-05-14T17:50:01.990Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "d3c05e36-acf4-4737-aa1b-1443f92dd457"}
2024-05-14T17:50:07.352Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "b2913f6b-1ce9-434e-89d0-159044436ab2"}
2024-05-14T17:50:12.773Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "c2ec56fa-3356-4d62-a518-f10ed4eb8959"}
2024-05-14T17:50:18.084Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1a1dd5db-6ec5-4de1-8f1a-cfecbfe90c5e"}
2024-05-14T17:50:23.465Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "7d634f55-b509-4c3a-b66b-3bc0a631f85a"}
2024-05-14T17:50:25.403Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "dbe3b589-221a-4175-bc8d-b75308388f9f", "user": "operator"}
2024-05-14T17:50:25.435Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "dbe3b589-221a-4175-bc8d-b75308388f9f", "user": "operator"}
2024-05-14T17:50:25.445Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "dbe3b589-221a-4175-bc8d-b75308388f9f", "secret": "some-name-mysql-init", "user": "operator"}
2024-05-14T17:50:25.455Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "dbe3b589-221a-4175-bc8d-b75308388f9f", "user": "operator"}
2024-05-14T17:50:25.487Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "dbe3b589-221a-4175-bc8d-b75308388f9f", "user": "operator"}
2024-05-14T17:50:25.517Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "dbe3b589-221a-4175-bc8d-b75308388f9f", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"}
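The "Password updated but old one not discarded" / "Old password discarded" pair corresponds to MySQL 8.0 dual-password rotation: the new password is set while the previous one stays valid until every client has reconnected. The equivalent SQL, shown here only as an illustration and not necessarily the operator's literal statements:

    kubectl -n users-16979 exec some-name-pxc-0 -c pxc -- mysql -uroot -p"$ROOT_PASS" \
      -e "ALTER USER 'monitor'@'%' IDENTIFIED BY 'new-pass' RETAIN CURRENT PASSWORD;"
    # ...after all clients pick up the new password...
    kubectl -n users-16979 exec some-name-pxc-0 -c pxc -- mysql -uroot -p"$ROOT_PASS" \
      -e "ALTER USER 'monitor'@'%' DISCARD OLD PASSWORD;"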
2024-05-14T17:50:27.193Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "dbe3b589-221a-4175-bc8d-b75308388f9f", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16979.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16979.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16979.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16979.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16979.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16979.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-05-14T17:51:01.097Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "76c6b26d-0985-4781-a8c9-bee528304f37"}
2024-05-14T17:51:09.436Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "0214dc96-01aa-4de1-87b2-8bbcc4928abe"}
2024-05-14T17:51:12.590Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "000387cb-2006-41ef-95d8-45e01764d227", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:1297) : Could not find any nodes belonging to the cluster with writer hostgroup:11\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:1297) : Could not find any nodes belonging to the cluster with writer hostgroup:11\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
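"Could not find any nodes belonging to the cluster with writer hostgroup:11" means ProxySQL's runtime server table briefly held no writer entry while the PXC pods were restarting. A hedged probe of the hostgroups (same placeholder password as above):

    kubectl -n users-16979 exec some-name-proxysql-0 -c proxysql -- \
      mysql -h127.0.0.1 -P6032 -uproxyadmin -p"$PROXYADMIN_PASS" \
      -e "SELECT hostgroup_id, hostname, status FROM runtime_mysql_servers ORDER BY hostgroup_id;"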
"pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1084deda-f5d0-4bc5-aaec-b73c5ba24958", "secrets": "my-cluster-secrets-2"} 2024-05-14T17:51:26.210Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1084deda-f5d0-4bc5-aaec-b73c5ba24958", "user": "root"} 2024-05-14T17:51:26.251Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1084deda-f5d0-4bc5-aaec-b73c5ba24958", "user": "root"} 2024-05-14T17:51:26.268Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1084deda-f5d0-4bc5-aaec-b73c5ba24958", "secret": "some-name-mysql-init", "user": "root"} 2024-05-14T17:51:30.989Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1084deda-f5d0-4bc5-aaec-b73c5ba24958"} 2024-05-14T17:51:31.002Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1084deda-f5d0-4bc5-aaec-b73c5ba24958", "user": "root"} 2024-05-14T17:51:31.061Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1084deda-f5d0-4bc5-aaec-b73c5ba24958", "user": "root"} 2024-05-14T17:51:31.077Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1084deda-f5d0-4bc5-aaec-b73c5ba24958", "user": "operator"} 2024-05-14T17:51:31.112Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1084deda-f5d0-4bc5-aaec-b73c5ba24958", "user": "operator"} 2024-05-14T17:51:31.124Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1084deda-f5d0-4bc5-aaec-b73c5ba24958", "secret": "some-name-mysql-init", "user": "operator"} 2024-05-14T17:51:31.140Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1084deda-f5d0-4bc5-aaec-b73c5ba24958", "user": "operator"} 2024-05-14T17:51:31.165Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1084deda-f5d0-4bc5-aaec-b73c5ba24958", "user": "operator"} 2024-05-14T17:51:31.177Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1084deda-f5d0-4bc5-aaec-b73c5ba24958", "user": "monitor"} 2024-05-14T17:51:31.208Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1084deda-f5d0-4bc5-aaec-b73c5ba24958", "user": "monitor"} 2024-05-14T17:51:31.223Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1084deda-f5d0-4bc5-aaec-b73c5ba24958", "secret": "some-name-mysql-init", "user": "monitor"} 2024-05-14T17:51:31.269Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1084deda-f5d0-4bc5-aaec-b73c5ba24958", "user": "monitor"} 2024-05-14T17:51:31.287Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": 
"1084deda-f5d0-4bc5-aaec-b73c5ba24958", "user": "monitor"} 2024-05-14T17:51:31.375Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1084deda-f5d0-4bc5-aaec-b73c5ba24958", "user": "xtrabackup"} 2024-05-14T17:51:31.410Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1084deda-f5d0-4bc5-aaec-b73c5ba24958", "user": "xtrabackup"} 2024-05-14T17:51:31.423Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1084deda-f5d0-4bc5-aaec-b73c5ba24958", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-05-14T17:51:31.449Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1084deda-f5d0-4bc5-aaec-b73c5ba24958", "user": "xtrabackup"} 2024-05-14T17:51:31.473Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1084deda-f5d0-4bc5-aaec-b73c5ba24958", "user": "xtrabackup"} 2024-05-14T17:51:31.485Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1084deda-f5d0-4bc5-aaec-b73c5ba24958", "user": "replication"} 2024-05-14T17:51:31.513Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1084deda-f5d0-4bc5-aaec-b73c5ba24958", "user": "replication"} 2024-05-14T17:51:31.532Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1084deda-f5d0-4bc5-aaec-b73c5ba24958", "secret": "some-name-mysql-init", "user": "replication"} 2024-05-14T17:51:31.544Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1084deda-f5d0-4bc5-aaec-b73c5ba24958", "user": "replication"} 2024-05-14T17:51:31.576Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1084deda-f5d0-4bc5-aaec-b73c5ba24958", "user": "replication"} 2024-05-14T17:51:31.576Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1084deda-f5d0-4bc5-aaec-b73c5ba24958", "user": "proxyadmin"} 2024-05-14T17:51:31.625Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1084deda-f5d0-4bc5-aaec-b73c5ba24958", "user": "proxyadmin"} 2024-05-14T17:51:31.639Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1084deda-f5d0-4bc5-aaec-b73c5ba24958", "user": "proxyadmin"} 2024-05-14T17:51:31.639Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1084deda-f5d0-4bc5-aaec-b73c5ba24958", "last-applied-secret": "3cd8b1ce2b53395099c4e050aef37588ecbd72b1307a2721faef944013af0fcc"} 2024-05-14T17:51:31.639Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1084deda-f5d0-4bc5-aaec-b73c5ba24958", "last-applied-secret": "3cd8b1ce2b53395099c4e050aef37588ecbd72b1307a2721faef944013af0fcc"} 2024-05-14T17:51:31.975Z ERROR sync users {"controller": 
"pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "1084deda-f5d0-4bc5-aaec-b73c5ba24958", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-14T17:52:20.519Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "720ed806-9ddf-460d-a32b-dabb92e3332f", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-16979 on 10.157.64.10:53: no such host"} 2024-05-14T17:52:25.435Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "539aa613-479b-474e-9c34-06bdfd75e683", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-16979 on 10.157.64.10:53: no such host"} 2024-05-14T17:52:30.799Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "a9e59f75-aebb-4947-a6d6-9fbf598b9c3e", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-16979 on 10.157.64.10:53: no such host"} 2024-05-14T17:53:56.240Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "c202ce81-0a6d-4b23-a52e-9f9262c3a6ed", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-16979: i/o timeout"} 2024-05-14T17:54:37.505Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "9f44d7e3-f1fc-4d06-9034-a01f41c34090", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-16979: i/o timeout"} 2024-05-14T17:56:14.648Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "5ccb87b7-ca90-472e-8c3f-1f1e4b90cd3a", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.109.88.67:33062: connect: connection refused"} 2024-05-14T17:56:19.901Z INFO Unable to find primary pod for replication. 
2024-05-14T17:56:19.901Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "e020cdf9-3f86-4514-b13d-fbbe3f31dcbb", "primary name": "some-name-pxc-0.some-name-pxc.users-16979.svc.cluster.local"}
2024-05-14T17:56:58.358Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "6cd48f23-d662-4870-a3da-65247b555b12", "user": "monitor"}
2024-05-14T17:56:58.679Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "6cd48f23-d662-4870-a3da-65247b555b12", "user": "monitor"}
2024-05-14T17:56:58.716Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "6cd48f23-d662-4870-a3da-65247b555b12", "last-applied-secret": "3cd8b1ce2b53395099c4e050aef37588ecbd72b1307a2721faef944013af0fcc"}
2024-05-14T17:57:02.257Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "6cd48f23-d662-4870-a3da-65247b555b12"}
2024-05-14T17:57:06.747Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "944447e4-6332-4c80-a973-d2868518a789"}
2024-05-14T17:57:12.582Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "ec1d7dd6-e219-4542-bc3f-af44188a1d43"}
2024-05-14T17:57:17.851Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "aa4ec25a-5579-477d-87f2-add16feccfbd"}
2024-05-14T17:57:23.243Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "bc49c10e-f8e4-4242-9284-fe8fb3b00a0c"}
2024-05-14T17:57:31.044Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "f0badf1d-2bfc-4a68-8a72-127aeadbacf6", "user": "operator"}
2024-05-14T17:57:31.078Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "f0badf1d-2bfc-4a68-8a72-127aeadbacf6", "user": "operator"}
2024-05-14T17:57:31.089Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "f0badf1d-2bfc-4a68-8a72-127aeadbacf6", "secret": "some-name-mysql-init", "user": "operator"}
2024-05-14T17:57:31.103Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "f0badf1d-2bfc-4a68-8a72-127aeadbacf6", "user": "operator"}
2024-05-14T17:57:31.138Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "f0badf1d-2bfc-4a68-8a72-127aeadbacf6", "user": "operator"}
2024-05-14T17:57:31.182Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "f0badf1d-2bfc-4a68-8a72-127aeadbacf6", "last-applied-secret": "c78ff71ef448118933f95562dac66e6a029ceb1c35a5151685aee53dbd86dd36"}
2024-05-14T17:57:32.563Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "f0badf1d-2bfc-4a68-8a72-127aeadbacf6", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16979.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16979.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16979.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-16979.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
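The repeated "Access denied for user 'operator'" lines show syncusers running with the new password before every node had applied it. To test the credential by hand, the password can be pulled from the operator's internal secret (the secret name internal-some-name and its key layout are assumptions here):

    PASS=$(kubectl -n users-16979 get secret internal-some-name \
      -o jsonpath='{.data.operator}' | base64 -d)
    kubectl -n users-16979 exec some-name-pxc-0 -c pxc -- \
      mysql -h127.0.0.1 -uoperator -p"$PASS" -e "SELECT CURRENT_USER();"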
2024-05-14T17:58:21.445Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "a8018aa2-56c2-429b-b342-e51d43be2f3c"}
2024-05-14T17:58:27.270Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "4c6abb03-385a-47ce-96e2-4d4012e00e6d"}
2024-05-14T17:58:32.528Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "f1dc47ec-2c8d-444c-bdce-485d21ef95ad"}
2024-05-14T17:58:37.654Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "264a425f-ab5c-4f13-845b-ab7bf8514393"}
2024-05-14T17:58:43.126Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "73762901-635a-46b1-9e9b-0a7d5126924e"}
2024-05-14T17:58:48.724Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "357ba7db-2817-4fd1-a855-43f1ed25444e"}
2024-05-14T17:58:54.125Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "0bc33a2b-06bb-421e-97ca-cff1c96df78b"}
2024-05-14T17:59:00.843Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "9b0e2756-2d52-49c1-9f34-2ae67894bc1d"}
2024-05-14T17:59:05.828Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "63110d23-b59a-455d-9bc7-83f72bdc24c7"}
2024-05-14T17:59:10.220Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "0c861965-10b6-4d84-8573-9b4040c94e75"}
2024-05-14T17:59:16.559Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "a0465b0d-074c-4d0a-bc5d-0e0254404a64"}
2024-05-14T17:59:22.148Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "7da63269-d052-4f5f-b16e-49bdbe247f96"}
2024-05-14T17:59:27.634Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "94f792b4-7fa6-49d3-ab6a-57c4d6aaf574"}
2024-05-14T17:59:32.921Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "d903ae93-c2e3-4106-873c-0ce9f2df0fa9"}
2024-05-14T17:59:38.431Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "5a22230c-f895-45db-a9be-e9a3ce8ff62e"}
2024-05-14T17:59:43.772Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "103d7fab-c6c3-42b5-9fa0-8380faed5bf0"}
2024-05-14T17:59:45.603Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "32f0cf28-bdb2-4c42-bcb6-6a5807cfb9ae", "user": "root"}
2024-05-14T17:59:45.656Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "32f0cf28-bdb2-4c42-bcb6-6a5807cfb9ae", "user": "root"}
2024-05-14T17:59:45.674Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "32f0cf28-bdb2-4c42-bcb6-6a5807cfb9ae", "secret": "some-name-mysql-init", "user": "root"}
2024-05-14T17:59:51.169Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "32f0cf28-bdb2-4c42-bcb6-6a5807cfb9ae"}
2024-05-14T17:59:51.184Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "32f0cf28-bdb2-4c42-bcb6-6a5807cfb9ae", "user": "root"}
2024-05-14T17:59:51.230Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "32f0cf28-bdb2-4c42-bcb6-6a5807cfb9ae", "user": "root"}
2024-05-14T17:59:51.257Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "32f0cf28-bdb2-4c42-bcb6-6a5807cfb9ae", "user": "monitor"}
2024-05-14T17:59:51.289Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "32f0cf28-bdb2-4c42-bcb6-6a5807cfb9ae", "user": "monitor"}
2024-05-14T17:59:51.305Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "32f0cf28-bdb2-4c42-bcb6-6a5807cfb9ae", "secret": "some-name-mysql-init", "user": "monitor"}
2024-05-14T17:59:51.353Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "32f0cf28-bdb2-4c42-bcb6-6a5807cfb9ae", "user": "monitor"}
2024-05-14T17:59:51.364Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "32f0cf28-bdb2-4c42-bcb6-6a5807cfb9ae", "user": "monitor"}
2024-05-14T17:59:51.449Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "32f0cf28-bdb2-4c42-bcb6-6a5807cfb9ae", "user": "xtrabackup"}
2024-05-14T17:59:51.484Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "32f0cf28-bdb2-4c42-bcb6-6a5807cfb9ae", "user": "xtrabackup"}
2024-05-14T17:59:51.502Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "32f0cf28-bdb2-4c42-bcb6-6a5807cfb9ae", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2024-05-14T17:59:51.513Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "32f0cf28-bdb2-4c42-bcb6-6a5807cfb9ae", "user": "xtrabackup"}
2024-05-14T17:59:51.551Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "32f0cf28-bdb2-4c42-bcb6-6a5807cfb9ae", "user": "xtrabackup"}
2024-05-14T17:59:51.564Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "32f0cf28-bdb2-4c42-bcb6-6a5807cfb9ae", "user": "proxyadmin"}
2024-05-14T17:59:51.612Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "32f0cf28-bdb2-4c42-bcb6-6a5807cfb9ae", "user": "proxyadmin"}
2024-05-14T17:59:51.632Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "32f0cf28-bdb2-4c42-bcb6-6a5807cfb9ae", "user": "proxyadmin"}
2024-05-14T17:59:51.632Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "32f0cf28-bdb2-4c42-bcb6-6a5807cfb9ae", "last-applied-secret": "f72133752cfde13d26ef21f5abff2be2b801e7f3a780d9ed27193192289e108c"}
2024-05-14T17:59:51.632Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "32f0cf28-bdb2-4c42-bcb6-6a5807cfb9ae", "last-applied-secret": "f72133752cfde13d26ef21f5abff2be2b801e7f3a780d9ed27193192289e108c"}
2024-05-14T17:59:51.911Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "32f0cf28-bdb2-4c42-bcb6-6a5807cfb9ae", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-05-14T18:00:06.584Z INFO KubeAPIWarningLogger .metadata.ownerReferences contains duplicate entries; API server dedups owner references in 1.20+, and may reject such requests as early as 1.24; please fix your requests; duplicate UID(s) observed: 2321a71a-a673-4221-962c-615080e9985d
2024-05-14T18:00:51.346Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "2eebae52-b759-45c3-b50b-512d75678971", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-16979 on 10.157.64.10:53: no such host"}
2024-05-14T18:01:45.126Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "8fbd0253-216e-46d3-91fb-b5c8d0dc8e98", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-16979 on 10.157.64.10:53: no such host"}
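The KubeAPIWarningLogger entry flags duplicate ownerReferences on some object (UID 2321a71a-a673-4221-962c-615080e9985d). A generic way to hunt for the object carrying the duplicate, listing each resource's owner UIDs:

    kubectl -n users-16979 get pods,pvc,secrets -o \
      jsonpath='{range .items[*]}{.metadata.name}{": "}{.metadata.ownerReferences[*].uid}{"\n"}{end}'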
"63c26aea-1a74-4d67-9ca5-63482899b97c", "user": "operator"} 2024-05-14T18:02:35.009Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "63c26aea-1a74-4d67-9ca5-63482899b97c", "user": "operator"} 2024-05-14T18:02:35.019Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "63c26aea-1a74-4d67-9ca5-63482899b97c", "user": "monitor"} 2024-05-14T18:02:35.058Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "63c26aea-1a74-4d67-9ca5-63482899b97c", "user": "monitor"} 2024-05-14T18:02:35.070Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "63c26aea-1a74-4d67-9ca5-63482899b97c", "secret": "some-name-mysql-init", "user": "monitor"} 2024-05-14T18:02:35.082Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "63c26aea-1a74-4d67-9ca5-63482899b97c", "user": "monitor"} 2024-05-14T18:02:35.192Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "63c26aea-1a74-4d67-9ca5-63482899b97c", "user": "xtrabackup"} 2024-05-14T18:02:35.223Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "63c26aea-1a74-4d67-9ca5-63482899b97c", "user": "xtrabackup"} 2024-05-14T18:02:35.235Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "63c26aea-1a74-4d67-9ca5-63482899b97c", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-05-14T18:02:35.247Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "63c26aea-1a74-4d67-9ca5-63482899b97c", "user": "xtrabackup"} 2024-05-14T18:02:35.275Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "63c26aea-1a74-4d67-9ca5-63482899b97c", "user": "xtrabackup"} 2024-05-14T18:02:35.291Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "63c26aea-1a74-4d67-9ca5-63482899b97c", "user": "replication"} 2024-05-14T18:02:35.324Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "63c26aea-1a74-4d67-9ca5-63482899b97c", "user": "replication"} 2024-05-14T18:02:35.338Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "63c26aea-1a74-4d67-9ca5-63482899b97c", "secret": "some-name-mysql-init", "user": "replication"} 2024-05-14T18:02:35.351Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "63c26aea-1a74-4d67-9ca5-63482899b97c", "user": "replication"} 2024-05-14T18:02:35.382Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "63c26aea-1a74-4d67-9ca5-63482899b97c", "user": "replication"} 2024-05-14T18:02:35.382Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "63c26aea-1a74-4d67-9ca5-63482899b97c", 
"last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-05-14T18:02:35.382Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "63c26aea-1a74-4d67-9ca5-63482899b97c", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-05-14T18:03:53.972Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "88ea7e80-a046-4237-a056-11ea16fa1880", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp 10.109.89.8:33062: i/o timeout"} 2024-05-14T18:05:09.036Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "6b891ade-ab51-4e5f-a0bd-026ef10e7825", "user": "monitor"} 2024-05-14T18:05:09.394Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "6b891ade-ab51-4e5f-a0bd-026ef10e7825", "user": "monitor"} 2024-05-14T18:05:09.427Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "6b891ade-ab51-4e5f-a0bd-026ef10e7825", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-05-14T18:05:19.410Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "e74f1506-2c34-4c4b-a030-160e7e03527b", "user": "monitor"} 2024-05-14T18:05:19.444Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "e74f1506-2c34-4c4b-a030-160e7e03527b", "user": "monitor"} 2024-05-14T18:05:19.454Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "e74f1506-2c34-4c4b-a030-160e7e03527b", "secret": "some-name-mysql-init", "user": "monitor"} 2024-05-14T18:05:19.470Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "e74f1506-2c34-4c4b-a030-160e7e03527b", "user": "monitor"} 2024-05-14T18:05:19.593Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "e74f1506-2c34-4c4b-a030-160e7e03527b", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"} 2024-05-14T18:06:26.780Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "a132f821-8db2-4ff7-af3c-5f21f1fa2584", "user": "monitor"} 2024-05-14T18:06:28.309Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "a132f821-8db2-4ff7-af3c-5f21f1fa2584", "user": "monitor"} 2024-05-14T18:06:28.336Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-16979", "name": "some-name", "reconcileID": "a132f821-8db2-4ff7-af3c-5f21f1fa2584", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"} github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:222 
github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1
        /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1248
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).reconcileHandler
        /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:324
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).processNextWorkItem
        /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:261
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Start.func2.2
        /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:222
+ kubectl get pxc --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ kubectl patch pxc -n users-16979 some-name --type=merge -p '{"metadata":{"finalizers":[]}}'
perconaxtradbcluster.pxc.percona.com/some-name patched
+ kubectl_bin delete pxc --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.Uxr3UFhMlh
++ mktemp
+ local LAST_ERR=/tmp/tmp.hnQZR4ohyQ
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.Uxr3UFhMlh
perconaxtradbcluster.pxc.percona.com "some-name" deleted
+ cat /tmp/tmp.hnQZR4ohyQ
+ rm /tmp/tmp.Uxr3UFhMlh /tmp/tmp.hnQZR4ohyQ
+ return 0
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.aN883u8JLJ
++ mktemp
+ local LAST_ERR=/tmp/tmp.Vam6Q89iCx
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.aN883u8JLJ
No resources found
+ cat /tmp/tmp.Vam6Q89iCx
+ rm /tmp/tmp.aN883u8JLJ /tmp/tmp.Vam6Q89iCx
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.nAe0DKjxvY
++ mktemp
+ local LAST_ERR=/tmp/tmp.zGGZaK2j7R
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.nAe0DKjxvY
No resources found
+ cat /tmp/tmp.zGGZaK2j7R
+ rm /tmp/tmp.nAe0DKjxvY /tmp/tmp.zGGZaK2j7R
+ return 0
+ kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
++ mktemp
+ local LAST_OUT=/tmp/tmp.S9PpshI15m
++ mktemp
+ local LAST_ERR=/tmp/tmp.u7CMRPyLFR
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.S9PpshI15m
validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted
+ cat /tmp/tmp.u7CMRPyLFR
+ rm /tmp/tmp.S9PpshI15m /tmp/tmp.u7CMRPyLFR
+ return 0
+ kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml
+ :
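The kubectl_bin traces above all expand the same retry helper: capture stdout/stderr to temp files, try up to three times, and replay the output. A minimal reconstruction under those assumptions (the real helper lives in e2e-tests/functions and may differ, for example in its back-off policy):

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            [ "$exit_status" != 0 ] || break
            sleep 1   # assumed pause between retries; not visible in the trace
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR"
        rm "$LAST_OUT" "$LAST_ERR"
        return $exit_status
    }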
+ '[' '!' -z '' ']'
+ '[' -n pxc-operator ']'
+ kubectl_bin delete --grace-period=0 --force=true namespace users-16979
+ rm -rf /tmp/tmp.8X6qenpyRR
+ kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator
++ mktemp
++ mktemp
+ local LAST_OUT=/tmp/tmp.RtFmOsXbB1
++ mktemp
+ local LAST_OUT=/tmp/tmp.xaL594ds7P
+ desc 'test passed'
+ set +o xtrace
-----------------------------------------------------------------------------------
test passed
-----------------------------------------------------------------------------------
++ mktemp
+ local LAST_ERR=/tmp/tmp.bgKyfNZ0rc
+ local exit_status=0
++ seq 0 2
+ local LAST_ERR=/tmp/tmp.XvC21dQl92
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace users-16979
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace pxc-operator
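The two namespace deletes run as parallel kubectl_bin invocations, which is why their traces interleave. If a follow-up run needs the namespaces fully gone first, a blocking check such as the following would work:

    kubectl wait --for=delete namespace/users-16979 --timeout=2m
    kubectl wait --for=delete namespace/pxc-operator --timeout=2m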