Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/logs/users-8-0.log WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 + create_infra users-18643 + local ns=users-18643 + '[' -n pxc-operator ']' + grep -v NAMESPACE + kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-13449 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.M64TPv4gmQ ++ mktemp + local LAST_ERR=/tmp/tmp.KYjP9061VU + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.M64TPv4gmQ perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.KYjP9061VU + rm /tmp/tmp.M64TPv4gmQ /tmp/tmp.KYjP9061VU + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.KcSCPBY0y6 ++ mktemp + local LAST_ERR=/tmp/tmp.ic1uBy87tx + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.KcSCPBY0y6 No resources found + cat /tmp/tmp.ic1uBy87tx + rm /tmp/tmp.KcSCPBY0y6 /tmp/tmp.ic1uBy87tx + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.pLnsC0HPCk ++ mktemp + local LAST_ERR=/tmp/tmp.9weiXAiWY5 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.pLnsC0HPCk No resources found + cat /tmp/tmp.9weiXAiWY5 + rm /tmp/tmp.pLnsC0HPCk /tmp/tmp.9weiXAiWY5 + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified 
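# The "error: resource(s) were provided, but no name was specified" lines above
# are expected: each destroy_chaos_mesh grep found no chaos-mesh objects, so
# "kubectl delete <kind>" ran with an empty name list, and the ":" no-op that
# follows each attempt swallows the failure. The part worth noting is the
# finalizer-clearing cleanup that opens create_infra; a minimal standalone
# sketch of that pattern, using the same commands as the trace (the harness's
# retry and output-capture wrapper omitted):

# Emit "<namespace> <name>" for every PerconaXtraDBCluster, drop the header
# row, then strip finalizers with a merge patch so the bulk delete cannot hang.
kubectl get pxc --all-namespaces -o wide | grep -v NAMESPACE \
  | xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
kubectl delete pxc --all --all-namespaces
kubectl delete pxc-backup --all --all-namespaces
kubectl delete pxc-restore --all --all-namespaces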
+ : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' ++ mktemp + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator ++ mktemp + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + local LAST_OUT=/tmp/tmp.rXQnU3LHcD + xargs kubectl delete ns ++ mktemp + local LAST_OUT=/tmp/tmp.PBM7iXr7kj ++ mktemp + local LAST_ERR=/tmp/tmp.3HtNleEtYH + local exit_status=0 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.udw5gbzgR4 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.rXQnU3LHcD + cat /tmp/tmp.3HtNleEtYH + rm /tmp/tmp.rXQnU3LHcD /tmp/tmp.3HtNleEtYH + return 0 namespace "users-13449" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.PBM7iXr7kj namespace "pxc-operator" deleted + cat /tmp/tmp.udw5gbzgR4 + rm /tmp/tmp.PBM7iXr7kj /tmp/tmp.udw5gbzgR4 + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'namespace/pxc-operator - ' namespace/pxc-operator - + set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.a6LaX04JxS ++ mktemp + local LAST_ERR=/tmp/tmp.wGEOiyHd3A + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.a6LaX04JxS namespace/pxc-operator created + cat /tmp/tmp.wGEOiyHd3A + rm /tmp/tmp.a6LaX04JxS /tmp/tmp.wGEOiyHd3A + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.3ruk9HFchJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.VxWJ5yT8nK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3ruk9HFchJ ++ cat /tmp/tmp.VxWJ5yT8nK ++ rm /tmp/tmp.3ruk9HFchJ /tmp/tmp.VxWJ5yT8nK ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1721-8dedf6d8-2-cluster4 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.o1QVqS83Tq ++ mktemp + local LAST_ERR=/tmp/tmp.ZEaDqEfNL1 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1721-8dedf6d8-2-cluster4 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.o1QVqS83Tq Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1721-8dedf6d8-2-cluster4" modified. 
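# The Forbidden error above (namespaces "default" is forbidden) is tolerated by
# the harness; it appears because the cleanup pipes `kubectl get ns` through
# egrep -v into xargs, and the ^default$ anchor apparently never matches since
# `kubectl get ns` prints status columns after the name. A condensed sketch of
# the namespace bootstrap this section performs (the harness implements the
# wait by polling until NotFound; `kubectl wait --for=delete` is an equivalent
# shorthand used here for brevity):

kubectl delete namespace pxc-operator --ignore-not-found
kubectl wait --for=delete namespace/pxc-operator --timeout=120s || true
kubectl create namespace pxc-operator
# Point the current kubeconfig context at the fresh namespace.
kubectl config set-context "$(kubectl config current-context)" --namespace=pxc-operator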
+ cat /tmp/tmp.ZEaDqEfNL1 + rm /tmp/tmp.o1QVqS83Tq /tmp/tmp.ZEaDqEfNL1 + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.NbGRyzjRsw ++ mktemp + local LAST_ERR=/tmp/tmp.gRFhO8Y0uA + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.NbGRyzjRsw customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.gRFhO8Y0uA + rm /tmp/tmp.NbGRyzjRsw /tmp/tmp.gRFhO8Y0uA + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.HhjrPgJIv5 ++ mktemp + local LAST_ERR=/tmp/tmp.0pudEHK9Jp + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.HhjrPgJIv5 clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.0pudEHK9Jp + rm /tmp/tmp.HhjrPgJIv5 /tmp/tmp.0pudEHK9Jp + return 0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/deploy/cw-operator.yaml + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-1721-8dedf6d8^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + kubectl_bin apply -f - ++ mktemp + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - + local LAST_OUT=/tmp/tmp.WfwwZcb6L4 ++ mktemp + local LAST_ERR=/tmp/tmp.kHyQpKWZCY + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.WfwwZcb6L4 deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.kHyQpKWZCY + rm /tmp/tmp.WfwwZcb6L4 /tmp/tmp.kHyQpKWZCY + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.GL92zCPEil ++ mktemp + local LAST_ERR=/tmp/tmp.TLl0Bp8EZP + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.GL92zCPEil pod/percona-xtradb-cluster-operator-678dd8bcd4-vbnm5 condition met + cat /tmp/tmp.TLl0Bp8EZP + rm /tmp/tmp.GL92zCPEil /tmp/tmp.TLl0Bp8EZP + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.dpYd7ysUaK +++ mktemp ++ local LAST_ERR=/tmp/tmp.j6bUQ1n0AN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dpYd7ysUaK ++ cat /tmp/tmp.j6bUQ1n0AN ++ rm /tmp/tmp.dpYd7ysUaK /tmp/tmp.j6bUQ1n0AN ++ return 0 + wait_pod percona-xtradb-cluster-operator-678dd8bcd4-vbnm5 480 pxc-operator + local pod=percona-xtradb-cluster-operator-678dd8bcd4-vbnm5 + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-678dd8bcd4-vbnm5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-678dd8bcd4-vbnm5 condition met percona-xtradb-cluster-operator-678dd8bcd4-vbnm5.Ok + sleep 3 + create_namespace users-18643 + local namespace=users-18643 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// ++ tail -n1 + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v 
'^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces users-18643' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-18643 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-18643 + kubectl_bin get ns ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.OPyhVBDAcC + local LAST_OUT=/tmp/tmp.wq9jimieLJ ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.vxANaNdBla + local exit_status=0 + local LAST_ERR=/tmp/tmp.dvelinzuQH + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-18643 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.OPyhVBDAcC + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + cat /tmp/tmp.vxANaNdBla + sleep 0 + rm /tmp/tmp.OPyhVBDAcC /tmp/tmp.vxANaNdBla + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-18643 + return 0 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-18643 Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.wq9jimieLJ + cat /tmp/tmp.dvelinzuQH Error from server (NotFound): namespaces "users-18643" not found + rm /tmp/tmp.wq9jimieLJ /tmp/tmp.dvelinzuQH + return 1 + : + wait_for_delete namespace/users-18643 + local res=namespace/users-18643 + echo -n 'namespace/users-18643 - ' namespace/users-18643 - + set +o xtrace Error from server (NotFound): namespaces "users-18643" not found + desc 'create namespace users-18643' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-18643 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-18643 ++ mktemp + local LAST_OUT=/tmp/tmp.hnklXyThVy ++ mktemp + local LAST_ERR=/tmp/tmp.jKiD1XkVrJ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-18643 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.hnklXyThVy namespace/users-18643 created + cat /tmp/tmp.jKiD1XkVrJ + rm /tmp/tmp.hnklXyThVy /tmp/tmp.jKiD1XkVrJ + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.O6ASs3Zzsp +++ mktemp ++ local LAST_ERR=/tmp/tmp.INv7LymQVU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.O6ASs3Zzsp ++ cat /tmp/tmp.INv7LymQVU ++ rm /tmp/tmp.O6ASs3Zzsp /tmp/tmp.INv7LymQVU ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1721-8dedf6d8-2-cluster4 --namespace=users-18643 ++ mktemp + local LAST_OUT=/tmp/tmp.8L3xD8ZqjL ++ mktemp + local LAST_ERR=/tmp/tmp.Wv3aKNriDh + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1721-8dedf6d8-2-cluster4 --namespace=users-18643 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.8L3xD8ZqjL Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1721-8dedf6d8-2-cluster4" modified. 
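# Every kubectl_bin block in this trace follows the same retry template:
# capture stdout/stderr into mktemp files, attempt the command up to three
# times with `set +e` around the call, break on exit status 0, then cat and
# remove the temp files and return the last status. Callers that expect a
# failure follow the call with ":" to ignore the nonzero return, as with the
# users-18643 delete just above. A sketch reconstructed from the trace (the
# real helper in e2e-tests/functions may differ in details):

kubectl_bin() {
    local out err status=1
    out=$(mktemp); err=$(mktemp)
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$out" 2>"$err"
        status=$?
        set -e
        [ "$status" -eq 0 ] && break
        sleep 0    # the trace shows "sleep 0", i.e. no backoff between attempts
    done
    cat "$out"; cat "$err" >&2
    rm -f "$out" "$err"
    return "$status"
}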
+ cat /tmp/tmp.Wv3aKNriDh + rm /tmp/tmp.8L3xD8ZqjL /tmp/tmp.Wv3aKNriDh + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.RdSBgf7BUl ++ mktemp + local LAST_ERR=/tmp/tmp.Ey23a4FG76 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.RdSBgf7BUl secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.Ey23a4FG76 + rm /tmp/tmp.RdSBgf7BUl /tmp/tmp.Ey23a4FG76 + return 0 + desc 'create PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.9ssihm8Pxq ++ mktemp + local LAST_ERR=/tmp/tmp.ZrRiDeUHx9 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.9ssihm8Pxq secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.ZrRiDeUHx9 + rm /tmp/tmp.9ssihm8Pxq /tmp/tmp.ZrRiDeUHx9 + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/conf/client.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1721-8dedf6d8#' + local LAST_OUT=/tmp/tmp.GOHuxEurIq + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 
's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + local LAST_ERR=/tmp/tmp.AWWtEA551o + local exit_status=0 + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' ++ seq 0 2 + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-18643~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.GOHuxEurIq deployment.apps/pxc-client created + cat /tmp/tmp.AWWtEA551o + rm /tmp/tmp.GOHuxEurIq /tmp/tmp.AWWtEA551o + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1721-8dedf6d8#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + local LAST_OUT=/tmp/tmp.Utd983mPTx + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-18643~ + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' ++ mktemp + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + local LAST_ERR=/tmp/tmp.BSCC0YCmxg + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Utd983mPTx perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.BSCC0YCmxg + rm /tmp/tmp.Utd983mPTx /tmp/tmp.BSCC0YCmxg + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.qbl3DosBoE ++++ mktemp +++ local LAST_ERR=/tmp/tmp.YWTdCIco6q +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 
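# Both `kubectl apply -f -` blocks above feed the manifest through cat_config,
# a chain of sed image overrides that pins every image to the build under
# test. A trimmed sketch using a subset of the substitutions visible in this
# trace (file path shortened for readability):

cat e2e-tests/conf/some-name.yml \
  | /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
  | /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1721-8dedf6d8#' \
  | /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' \
  | /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' \
  | /usr/bin/sed -e 's~minio-service.#namespace~minio-service.users-18643~' \
  | kubectl apply -f -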
+++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.qbl3DosBoE +++ cat /tmp/tmp.YWTdCIco6q +++ rm /tmp/tmp.qbl3DosBoE /tmp/tmp.YWTdCIco6q +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.5wmAquwKx2 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.r2RlmfuqVL +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.5wmAquwKx2 +++ cat /tmp/tmp.r2RlmfuqVL +++ rm /tmp/tmp.5wmAquwKx2 /tmp/tmp.r2RlmfuqVL +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-18643 ++ mktemp + local LAST_OUT=/tmp/tmp.FwHrWhA3yw ++ mktemp + local LAST_ERR=/tmp/tmp.UzfeMI6qXP + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-18643 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-18643 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-18643 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.FwHrWhA3yw + cat /tmp/tmp.UzfeMI6qXP error: no matching resources found + rm /tmp/tmp.FwHrWhA3yw /tmp/tmp.UzfeMI6qXP + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met some-name-proxysql-0.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met some-name-pxc-0.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local 
max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met some-name-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met some-name-pxc-2.Ok + sleep 10 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SPOEKe9uuo +++ mktemp ++ local LAST_ERR=/tmp/tmp.JN0YWJLTgR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SPOEKe9uuo ++ cat /tmp/tmp.JN0YWJLTgR ++ rm /tmp/tmp.SPOEKe9uuo /tmp/tmp.JN0YWJLTgR ++ return 0 + client_pod=pxc-client-6644d8898f-btr4b + wait_pod pxc-client-6644d8898f-btr4b + local pod=pxc-client-6644d8898f-btr4b + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btr4b ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btr4b condition met pxc-client-6644d8898f-btr4b.Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NI44ub5H2u +++ mktemp ++ local LAST_ERR=/tmp/tmp.CiQGOgSysR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NI44ub5H2u ++ cat /tmp/tmp.CiQGOgSysR ++ rm /tmp/tmp.NI44ub5H2u /tmp/tmp.CiQGOgSysR ++ return 0 + client_pod=pxc-client-6644d8898f-btr4b + wait_pod pxc-client-6644d8898f-btr4b + local pod=pxc-client-6644d8898f-btr4b + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btr4b ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btr4b condition met pxc-client-6644d8898f-btr4b.Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h 
some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uY7HiJ8QKa +++ mktemp ++ local LAST_ERR=/tmp/tmp.esnE6bOGi7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uY7HiJ8QKa ++ cat /tmp/tmp.esnE6bOGi7 ++ rm /tmp/tmp.uY7HiJ8QKa /tmp/tmp.esnE6bOGi7 ++ return 0 + client_pod=pxc-client-6644d8898f-btr4b + wait_pod pxc-client-6644d8898f-btr4b + local pod=pxc-client-6644d8898f-btr4b + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btr4b ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btr4b condition met pxc-client-6644d8898f-btr4b.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.kNkEqeqYPE/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-1.sql /tmp/tmp.kNkEqeqYPE/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VyrngxhYLf +++ mktemp ++ local LAST_ERR=/tmp/tmp.yAs30ffaO2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VyrngxhYLf ++ cat /tmp/tmp.yAs30ffaO2 ++ rm /tmp/tmp.VyrngxhYLf /tmp/tmp.yAs30ffaO2 ++ return 0 + client_pod=pxc-client-6644d8898f-btr4b + wait_pod pxc-client-6644d8898f-btr4b + local pod=pxc-client-6644d8898f-btr4b + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btr4b ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btr4b condition met pxc-client-6644d8898f-btr4b.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.kNkEqeqYPE/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-1.sql /tmp/tmp.kNkEqeqYPE/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.udrOAAcqus +++ mktemp ++ local LAST_ERR=/tmp/tmp.8xRpdQ1kM3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.udrOAAcqus ++ cat /tmp/tmp.8xRpdQ1kM3 ++ rm /tmp/tmp.udrOAAcqus /tmp/tmp.8xRpdQ1kM3 ++ return 0 + client_pod=pxc-client-6644d8898f-btr4b + wait_pod pxc-client-6644d8898f-btr4b + local pod=pxc-client-6644d8898f-btr4b + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btr4b ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btr4b condition met pxc-client-6644d8898f-btr4b.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.kNkEqeqYPE/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-1.sql /tmp/tmp.kNkEqeqYPE/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.F1oOX7FGtH +++ mktemp ++ local LAST_ERR=/tmp/tmp.vKlDfBHlcN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.F1oOX7FGtH ++ cat /tmp/tmp.vKlDfBHlcN Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.F1oOX7FGtH /tmp/tmp.vKlDfBHlcN ++ return 0 + '[' '' ']' + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.eHos9zpW7c ++ mktemp + local LAST_ERR=/tmp/tmp.mbkgS5oskf + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.eHos9zpW7c secret/my-cluster-secrets patched + cat /tmp/tmp.mbkgS5oskf + rm /tmp/tmp.eHos9zpW7c /tmp/tmp.mbkgS5oskf + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.I40V4LKvYo +++ mktemp ++ local LAST_ERR=/tmp/tmp.iEQdCfVOVB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.I40V4LKvYo ++ cat /tmp/tmp.iEQdCfVOVB ++ rm /tmp/tmp.I40V4LKvYo /tmp/tmp.iEQdCfVOVB ++ return 0 + client_pod=pxc-client-6644d8898f-btr4b + wait_pod pxc-client-6644d8898f-btr4b + local pod=pxc-client-6644d8898f-btr4b + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btr4b ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btr4b condition met pxc-client-6644d8898f-btr4b.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.kNkEqeqYPE/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.kNkEqeqYPE/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.WR1t1pjo9a ++ mktemp + local LAST_ERR=/tmp/tmp.Yl7JMChhfI + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.WR1t1pjo9a perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.Yl7JMChhfI + rm /tmp/tmp.WR1t1pjo9a /tmp/tmp.Yl7JMChhfI + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZoiutsAiQn +++ mktemp ++ local LAST_ERR=/tmp/tmp.mUZIrkpWig ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZoiutsAiQn ++ cat /tmp/tmp.mUZIrkpWig ++ rm /tmp/tmp.ZoiutsAiQn /tmp/tmp.mUZIrkpWig ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.o3VYVGGB6k +++ mktemp ++ local LAST_ERR=/tmp/tmp.rtLnU8reuB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.o3VYVGGB6k ++ cat /tmp/tmp.rtLnU8reuB ++ rm /tmp/tmp.o3VYVGGB6k /tmp/tmp.rtLnU8reuB ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.kY13dtUpBf ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.0pHVwygChI +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.kY13dtUpBf +++++ cat /tmp/tmp.0pHVwygChI +++++ rm /tmp/tmp.kY13dtUpBf /tmp/tmp.0pHVwygChI +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.tZVUeWd7BP ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.GTKGSlMipu +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.tZVUeWd7BP +++++ cat /tmp/tmp.GTKGSlMipu +++++ rm /tmp/tmp.tZVUeWd7BP /tmp/tmp.GTKGSlMipu +++++ return 0 ++++ [[ true == 
\t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZknjUcY6SE +++ mktemp ++ local LAST_ERR=/tmp/tmp.EYC1Fsg7B5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZknjUcY6SE ++ cat /tmp/tmp.EYC1Fsg7B5 ++ rm /tmp/tmp.ZknjUcY6SE /tmp/tmp.EYC1Fsg7B5 ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.1Yc0TICH74 ++ mktemp + local LAST_ERR=/tmp/tmp.ML3c9u7AFk + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1Yc0TICH74 secret/my-cluster-secrets patched + cat /tmp/tmp.ML3c9u7AFk + rm /tmp/tmp.1Yc0TICH74 /tmp/tmp.ML3c9u7AFk + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VwB5SQHABC +++ mktemp ++ local LAST_ERR=/tmp/tmp.qWqSxo1MXO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VwB5SQHABC ++ cat /tmp/tmp.qWqSxo1MXO ++ rm /tmp/tmp.VwB5SQHABC /tmp/tmp.qWqSxo1MXO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.00na7Amomk +++ mktemp ++ local LAST_ERR=/tmp/tmp.soOgYjfHi3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.00na7Amomk ++ cat /tmp/tmp.soOgYjfHi3 ++ rm /tmp/tmp.00na7Amomk /tmp/tmp.soOgYjfHi3 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GE94T0yaoh +++ mktemp ++ local LAST_ERR=/tmp/tmp.mfiEGJuyci ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GE94T0yaoh ++ cat /tmp/tmp.mfiEGJuyci ++ rm /tmp/tmp.GE94T0yaoh /tmp/tmp.mfiEGJuyci ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local 
LAST_OUT=/tmp/tmp.IHObE9yO1W ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.07g0URguQ0 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.IHObE9yO1W +++++ cat /tmp/tmp.07g0URguQ0 +++++ rm /tmp/tmp.IHObE9yO1W /tmp/tmp.07g0URguQ0 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.N7pwt0s4bW ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ICobl5VQ3m +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.N7pwt0s4bW +++++ cat /tmp/tmp.ICobl5VQ3m +++++ rm /tmp/tmp.N7pwt0s4bW /tmp/tmp.ICobl5VQ3m +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.faIBYZe9rS +++ mktemp ++ local LAST_ERR=/tmp/tmp.PLQZa7OFwF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.faIBYZe9rS ++ cat /tmp/tmp.PLQZa7OFwF ++ rm /tmp/tmp.faIBYZe9rS /tmp/tmp.PLQZa7OFwF ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.kNkEqeqYPE/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-2.sql /tmp/tmp.kNkEqeqYPE/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.kNkEqeqYPE/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-2.sql /tmp/tmp.kNkEqeqYPE/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.kNkEqeqYPE/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-2.sql /tmp/tmp.kNkEqeqYPE/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.ACpkDmyMTM ++ mktemp + local LAST_ERR=/tmp/tmp.WagOixCjQq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ACpkDmyMTM perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.WagOixCjQq + rm /tmp/tmp.ACpkDmyMTM /tmp/tmp.WagOixCjQq + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.8Cmt7NyUyy ++ mktemp + local LAST_ERR=/tmp/tmp.1LXyPo2IM4 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.8Cmt7NyUyy secret/my-cluster-secrets patched + cat /tmp/tmp.1LXyPo2IM4 + rm /tmp/tmp.8Cmt7NyUyy /tmp/tmp.1LXyPo2IM4 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.792MZqKAh1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.DnkUe4JdBj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.792MZqKAh1 ++ cat /tmp/tmp.DnkUe4JdBj ++ rm /tmp/tmp.792MZqKAh1 /tmp/tmp.DnkUe4JdBj ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.25w6p0nXSi +++ mktemp ++ local LAST_ERR=/tmp/tmp.7RexGaKT2l ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.25w6p0nXSi ++ cat /tmp/tmp.7RexGaKT2l ++ rm /tmp/tmp.25w6p0nXSi /tmp/tmp.7RexGaKT2l ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.h7qagjz50P +++ mktemp ++ local LAST_ERR=/tmp/tmp.0jxbbgEJPd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 
'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.h7qagjz50P ++ cat /tmp/tmp.0jxbbgEJPd ++ rm /tmp/tmp.h7qagjz50P /tmp/tmp.0jxbbgEJPd ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.u9wt5zkAes +++ mktemp ++ local LAST_ERR=/tmp/tmp.AO0chVN23K ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.u9wt5zkAes ++ cat /tmp/tmp.AO0chVN23K ++ rm /tmp/tmp.u9wt5zkAes /tmp/tmp.AO0chVN23K ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zOPphIZZ37 +++ mktemp ++ local LAST_ERR=/tmp/tmp.yYvo63ldun ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zOPphIZZ37 ++ cat /tmp/tmp.yYvo63ldun ++ rm /tmp/tmp.zOPphIZZ37 /tmp/tmp.yYvo63ldun ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7Bfje5TIQG +++ mktemp ++ local LAST_ERR=/tmp/tmp.K62E2s0UOI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7Bfje5TIQG ++ cat /tmp/tmp.K62E2s0UOI ++ rm /tmp/tmp.7Bfje5TIQG /tmp/tmp.K62E2s0UOI ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bXKnXyj3Ge +++ mktemp ++ local LAST_ERR=/tmp/tmp.W5efgI0LYu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bXKnXyj3Ge ++ cat /tmp/tmp.W5efgI0LYu ++ rm /tmp/tmp.bXKnXyj3Ge /tmp/tmp.W5efgI0LYu ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EMYCpHv7t0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.CEIacM7v5d ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EMYCpHv7t0 ++ cat /tmp/tmp.CEIacM7v5d ++ rm /tmp/tmp.EMYCpHv7t0 /tmp/tmp.CEIacM7v5d ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ul8SMuTjvL +++ mktemp ++ local LAST_ERR=/tmp/tmp.fF9Z5K2bFC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e 
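# The long run of "waiting for cluster readyness" above is
# wait_cluster_consistency polling the CR status while the operator restarts
# pods after the proxysql resize and xtrabackup password change. A sketch
# reconstructed from the trace (initial 7s settle, then up to 36 attempts 20s
# apart, plus the final replica-count checks):

wait_cluster_consistency() {
    local cluster=$1 cluster_size=$2 proxy_size=$3
    local i=0 max=36
    sleep 7
    while [[ $(kubectl get pxc "$cluster" -o 'jsonpath={.status.state}') != "ready" ]]; do
        echo 'waiting for cluster readyness'   # message spelled as in the harness
        sleep 20
        [[ $((++i)) -ge $max ]] && return 1
    done
    # Once ready, the reported replica counts must match the requested sizes.
    [[ $(kubectl get pxc "$cluster" -o 'jsonpath={.status.pxc.ready}') == "$cluster_size" ]] \
        && [[ $(kubectl get pxc "$cluster" -o 'jsonpath={.status.proxysql.ready}') == "$proxy_size" ]]
}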
++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Ul8SMuTjvL ++ cat /tmp/tmp.fF9Z5K2bFC ++ rm /tmp/tmp.Ul8SMuTjvL /tmp/tmp.fF9Z5K2bFC ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.FJot7yfZB0 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.7kDr3uWJ6O +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.FJot7yfZB0 +++++ cat /tmp/tmp.7kDr3uWJ6O +++++ rm /tmp/tmp.FJot7yfZB0 /tmp/tmp.7kDr3uWJ6O +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.dHmMMQtdeN ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.gW2ecpMDVg +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.dHmMMQtdeN +++++ cat /tmp/tmp.gW2ecpMDVg +++++ rm /tmp/tmp.dHmMMQtdeN /tmp/tmp.gW2ecpMDVg +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JDkbgFnsmf +++ mktemp ++ local LAST_ERR=/tmp/tmp.5Rog3WX3Xb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JDkbgFnsmf ++ cat /tmp/tmp.5Rog3WX3Xb ++ rm /tmp/tmp.JDkbgFnsmf /tmp/tmp.5Rog3WX3Xb ++ return 0 + [[ 2 == \2 ]] + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-3-80.sql ']' + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' 
-s /tmp/tmp.kNkEqeqYPE/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-3.sql /tmp/tmp.kNkEqeqYPE/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.zL78JirApz ++ mktemp + local LAST_ERR=/tmp/tmp.nWKCRNByL0 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.zL78JirApz secret/my-cluster-secrets patched + cat /tmp/tmp.nWKCRNByL0 + rm /tmp/tmp.zL78JirApz /tmp/tmp.nWKCRNByL0 + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.gSOo9eN6OW +++ mktemp ++ local LAST_ERR=/tmp/tmp.e2oxyE4pMT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gSOo9eN6OW ++ cat /tmp/tmp.e2oxyE4pMT ++ rm /tmp/tmp.gSOo9eN6OW /tmp/tmp.e2oxyE4pMT ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + retry=0 + is_password_updated monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep additional_password + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RayAIYOO7t +++ mktemp ++ local LAST_ERR=/tmp/tmp.KQI5OKV7vv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RayAIYOO7t ++ cat /tmp/tmp.KQI5OKV7vv ++ rm /tmp/tmp.RayAIYOO7t /tmp/tmp.KQI5OKV7vv ++ return 0 + client_pod=pxc-client-6644d8898f-btr4b + wait_pod pxc-client-6644d8898f-btr4b + local pod=pxc-client-6644d8898f-btr4b + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btr4b ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btr4b condition met pxc-client-6644d8898f-btr4b.Ok + set +o xtrace + echo 'waiting for password update' waiting for password update + sleep 1 + let retry+=1 + [[ 1 -ge 240 ]] + is_password_updated monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 
'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep additional_password + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ykKyzf8ELf +++ mktemp ++ local LAST_ERR=/tmp/tmp.yuKE9uuR5m ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ykKyzf8ELf ++ cat /tmp/tmp.yuKE9uuR5m ++ rm /tmp/tmp.ykKyzf8ELf /tmp/tmp.yuKE9uuR5m ++ return 0 + client_pod=pxc-client-6644d8898f-btr4b + wait_pod pxc-client-6644d8898f-btr4b + local pod=pxc-client-6644d8898f-btr4b + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btr4b ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btr4b condition met pxc-client-6644d8898f-btr4b.Ok + set +o xtrace {"additional_password": "*C1F414D9BAF378B656A849B31F9F8AF3125F558B"} + retry=0 + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.D3Yu6T6fwf +++ mktemp ++ local LAST_ERR=/tmp/tmp.W1XviZDJwY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.D3Yu6T6fwf ++ cat /tmp/tmp.W1XviZDJwY ++ rm /tmp/tmp.D3Yu6T6fwf /tmp/tmp.W1XviZDJwY ++ return 0 + client_pod=pxc-client-6644d8898f-btr4b + wait_pod pxc-client-6644d8898f-btr4b + local pod=pxc-client-6644d8898f-btr4b + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btr4b ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btr4b condition met pxc-client-6644d8898f-btr4b.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 1 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZP6zgfoA8b +++ mktemp ++ local LAST_ERR=/tmp/tmp.OSc791hSl8 ++ local 
exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZP6zgfoA8b ++ cat /tmp/tmp.OSc791hSl8 ++ rm /tmp/tmp.ZP6zgfoA8b /tmp/tmp.OSc791hSl8 ++ return 0 + client_pod=pxc-client-6644d8898f-btr4b + wait_pod pxc-client-6644d8898f-btr4b + local pod=pxc-client-6644d8898f-btr4b + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btr4b ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btr4b condition met pxc-client-6644d8898f-btr4b.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 2 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zVplps0LCK +++ mktemp ++ local LAST_ERR=/tmp/tmp.sX1O7dQsBB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zVplps0LCK ++ cat /tmp/tmp.sX1O7dQsBB ++ rm /tmp/tmp.zVplps0LCK /tmp/tmp.sX1O7dQsBB ++ return 0 + client_pod=pxc-client-6644d8898f-btr4b + wait_pod pxc-client-6644d8898f-btr4b + local pod=pxc-client-6644d8898f-btr4b + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btr4b ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btr4b condition met pxc-client-6644d8898f-btr4b.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 3 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.C8Cf0YmNhO +++ mktemp ++ local LAST_ERR=/tmp/tmp.huXztjH16C ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.C8Cf0YmNhO ++ cat /tmp/tmp.huXztjH16C ++ rm /tmp/tmp.C8Cf0YmNhO /tmp/tmp.huXztjH16C ++ return 0 + client_pod=pxc-client-6644d8898f-btr4b + wait_pod pxc-client-6644d8898f-btr4b + local pod=pxc-client-6644d8898f-btr4b + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btr4b ++ /usr/bin/sed -E 
's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btr4b condition met pxc-client-6644d8898f-btr4b.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 4 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XGHhMOYp6b +++ mktemp ++ local LAST_ERR=/tmp/tmp.4RRW1r5DYu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XGHhMOYp6b ++ cat /tmp/tmp.4RRW1r5DYu ++ rm /tmp/tmp.XGHhMOYp6b /tmp/tmp.4RRW1r5DYu ++ return 0 + client_pod=pxc-client-6644d8898f-btr4b + wait_pod pxc-client-6644d8898f-btr4b + local pod=pxc-client-6644d8898f-btr4b + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btr4b ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btr4b condition met pxc-client-6644d8898f-btr4b.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 5 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eXHihDBXuG +++ mktemp ++ local LAST_ERR=/tmp/tmp.4ayU7Fh29p ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eXHihDBXuG ++ cat /tmp/tmp.4ayU7Fh29p ++ rm /tmp/tmp.eXHihDBXuG /tmp/tmp.4ayU7Fh29p ++ return 0 + client_pod=pxc-client-6644d8898f-btr4b + wait_pod pxc-client-6644d8898f-btr4b + local pod=pxc-client-6644d8898f-btr4b + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btr4b ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btr4b condition met pxc-client-6644d8898f-btr4b.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 6 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE 
user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YXj56r7Fhh +++ mktemp ++ local LAST_ERR=/tmp/tmp.xNU4tt6tjC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YXj56r7Fhh ++ cat /tmp/tmp.xNU4tt6tjC ++ rm /tmp/tmp.YXj56r7Fhh /tmp/tmp.xNU4tt6tjC ++ return 0 + client_pod=pxc-client-6644d8898f-btr4b + wait_pod pxc-client-6644d8898f-btr4b + local pod=pxc-client-6644d8898f-btr4b + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btr4b ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btr4b condition met pxc-client-6644d8898f-btr4b.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 7 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ifDyMibTE4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.qdCN4xSe5w ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ifDyMibTE4 ++ cat /tmp/tmp.qdCN4xSe5w ++ rm /tmp/tmp.ifDyMibTE4 /tmp/tmp.qdCN4xSe5w ++ return 0 + client_pod=pxc-client-6644d8898f-btr4b + wait_pod pxc-client-6644d8898f-btr4b + local pod=pxc-client-6644d8898f-btr4b + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btr4b ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btr4b condition met pxc-client-6644d8898f-btr4b.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 8 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.buAABFIboC +++ mktemp ++ local LAST_ERR=/tmp/tmp.bjwuzg5tiW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods 
--selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.buAABFIboC ++ cat /tmp/tmp.bjwuzg5tiW ++ rm /tmp/tmp.buAABFIboC /tmp/tmp.bjwuzg5tiW ++ return 0 + client_pod=pxc-client-6644d8898f-btr4b + wait_pod pxc-client-6644d8898f-btr4b + local pod=pxc-client-6644d8898f-btr4b + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btr4b ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btr4b condition met pxc-client-6644d8898f-btr4b.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 9 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.q49VlQdzkG +++ mktemp ++ local LAST_ERR=/tmp/tmp.etpqP1Il0u ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.q49VlQdzkG ++ cat /tmp/tmp.etpqP1Il0u ++ rm /tmp/tmp.q49VlQdzkG /tmp/tmp.etpqP1Il0u ++ return 0 + client_pod=pxc-client-6644d8898f-btr4b + wait_pod pxc-client-6644d8898f-btr4b + local pod=pxc-client-6644d8898f-btr4b + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btr4b ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btr4b condition met pxc-client-6644d8898f-btr4b.Ok + set +o xtrace NULL + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JB2CIT6slF +++ mktemp ++ local LAST_ERR=/tmp/tmp.0iTv7RaoCP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JB2CIT6slF ++ cat /tmp/tmp.0iTv7RaoCP ++ rm /tmp/tmp.JB2CIT6slF /tmp/tmp.0iTv7RaoCP ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LO7zD43iqF +++ mktemp ++ local LAST_ERR=/tmp/tmp.0IV4IsrVAB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LO7zD43iqF ++ cat /tmp/tmp.0IV4IsrVAB ++ rm /tmp/tmp.LO7zD43iqF /tmp/tmp.0IV4IsrVAB ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name 
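
The propagation loop above relies on MySQL 8.0's dual-password feature: while the operator rotates the monitor password, mysql.user.User_attributes carries an "additional_password" entry (visible earlier in the trace) until the old password is discarded, at which point the column reads NULL and the test moves on. A standalone sketch of that check, assuming client access to the PXC service; host and credentials are placeholders taken from the trace:

    # Inspect the dual-password state of a user (MySQL 8.0.14+).
    USER=monitor
    ATTRS=$(mysql -h some-name-pxc -uroot -p"$ROOT_PASS" -NBe \
        "SELECT User_attributes FROM mysql.user WHERE user='$USER'")
    if echo "$ATTRS" | grep -q additional_password; then
        echo "rotation in progress: old password still retained"
    elif echo "$ATTRS" | grep -q NULL; then
        echo "old password discarded"
    fi
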
+++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.cw0kj3Gpt4 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.clwDq0EweK +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.cw0kj3Gpt4 +++++ cat /tmp/tmp.clwDq0EweK +++++ rm /tmp/tmp.cw0kj3Gpt4 /tmp/tmp.clwDq0EweK +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.n6DrX03xS8 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.A2nFoE8OyN +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.n6DrX03xS8 +++++ cat /tmp/tmp.A2nFoE8OyN +++++ rm /tmp/tmp.n6DrX03xS8 /tmp/tmp.A2nFoE8OyN +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ag52jN4DQY +++ mktemp ++ local LAST_ERR=/tmp/tmp.mN0erVXGPG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ag52jN4DQY ++ cat /tmp/tmp.mN0erVXGPG ++ rm /tmp/tmp.ag52jN4DQY /tmp/tmp.mN0erVXGPG ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SxXbcxTb8a +++ mktemp ++ local LAST_ERR=/tmp/tmp.9HrOOKKLyH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SxXbcxTb8a ++ cat /tmp/tmp.9HrOOKKLyH ++ rm /tmp/tmp.SxXbcxTb8a /tmp/tmp.9HrOOKKLyH ++ return 0 + client_pod=pxc-client-6644d8898f-btr4b + wait_pod pxc-client-6644d8898f-btr4b + local pod=pxc-client-6644d8898f-btr4b + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btr4b ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btr4b condition met pxc-client-6644d8898f-btr4b.Ok + set 
+o xtrace + '[' '!' -s /tmp/tmp.kNkEqeqYPE/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.kNkEqeqYPE/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.oKKHwMS6jo ++ mktemp + local LAST_ERR=/tmp/tmp.UuN3rNKkXa + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.oKKHwMS6jo secret/my-cluster-secrets patched + cat /tmp/tmp.UuN3rNKkXa + rm /tmp/tmp.oKKHwMS6jo /tmp/tmp.UuN3rNKkXa + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YcK8ulTXnJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.NFO8FIQJls ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YcK8ulTXnJ ++ cat /tmp/tmp.NFO8FIQJls ++ rm /tmp/tmp.YcK8ulTXnJ /tmp/tmp.NFO8FIQJls ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.A4nCMkCnVX +++ mktemp ++ local LAST_ERR=/tmp/tmp.vhYTgUDko9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.A4nCMkCnVX ++ cat /tmp/tmp.vhYTgUDko9 ++ rm /tmp/tmp.A4nCMkCnVX /tmp/tmp.vhYTgUDko9 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.e9Ud8pTI44 +++ mktemp ++ local LAST_ERR=/tmp/tmp.n3T7krf22V ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.e9Ud8pTI44 ++ cat /tmp/tmp.n3T7krf22V ++ rm /tmp/tmp.e9Ud8pTI44 /tmp/tmp.n3T7krf22V ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.5Q5teAVbX6 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.bqdB2KAVov +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 
'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.5Q5teAVbX6 +++++ cat /tmp/tmp.bqdB2KAVov +++++ rm /tmp/tmp.5Q5teAVbX6 /tmp/tmp.bqdB2KAVov +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.B1SkuOzJMh ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ThNWhqpAEz +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.B1SkuOzJMh +++++ cat /tmp/tmp.ThNWhqpAEz +++++ rm /tmp/tmp.B1SkuOzJMh /tmp/tmp.ThNWhqpAEz +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.toDGPEzhUO +++ mktemp ++ local LAST_ERR=/tmp/tmp.ggNyu56LiE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.toDGPEzhUO ++ cat /tmp/tmp.ggNyu56LiE ++ rm /tmp/tmp.toDGPEzhUO /tmp/tmp.ggNyu56LiE ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jsNQzKyBA9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.pRBdai31et ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jsNQzKyBA9 ++ cat /tmp/tmp.pRBdai31et ++ rm /tmp/tmp.jsNQzKyBA9 /tmp/tmp.pRBdai31et ++ return 0 + client_pod=pxc-client-6644d8898f-btr4b + wait_pod pxc-client-6644d8898f-btr4b + local pod=pxc-client-6644d8898f-btr4b + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btr4b ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btr4b condition met pxc-client-6644d8898f-btr4b.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.kNkEqeqYPE/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.kNkEqeqYPE/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.IVq2xQudzT ++ mktemp + local LAST_ERR=/tmp/tmp.4JDjexltca + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.IVq2xQudzT perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.4JDjexltca + rm /tmp/tmp.IVq2xQudzT /tmp/tmp.4JDjexltca + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0pjWqNJbiR +++ mktemp ++ local LAST_ERR=/tmp/tmp.HAGboCES24 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0pjWqNJbiR ++ cat /tmp/tmp.HAGboCES24 ++ rm /tmp/tmp.0pjWqNJbiR /tmp/tmp.HAGboCES24 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MpjaNRIXzP +++ mktemp ++ local LAST_ERR=/tmp/tmp.d6nvIDA4Ci ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MpjaNRIXzP ++ cat /tmp/tmp.d6nvIDA4Ci ++ rm /tmp/tmp.MpjaNRIXzP /tmp/tmp.d6nvIDA4Ci ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6y9Eb70BMO +++ mktemp ++ local LAST_ERR=/tmp/tmp.jwaleyBfgs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6y9Eb70BMO ++ cat /tmp/tmp.jwaleyBfgs ++ rm /tmp/tmp.6y9Eb70BMO /tmp/tmp.jwaleyBfgs ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yvP1YH2RMB +++ mktemp ++ local LAST_ERR=/tmp/tmp.loKCzzOYsC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ 
break ++ cat /tmp/tmp.yvP1YH2RMB ++ cat /tmp/tmp.loKCzzOYsC ++ rm /tmp/tmp.yvP1YH2RMB /tmp/tmp.loKCzzOYsC ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OlsoCx1ZDL +++ mktemp ++ local LAST_ERR=/tmp/tmp.MDEydLfFn4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OlsoCx1ZDL ++ cat /tmp/tmp.MDEydLfFn4 ++ rm /tmp/tmp.OlsoCx1ZDL /tmp/tmp.MDEydLfFn4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fQ525TY2ph +++ mktemp ++ local LAST_ERR=/tmp/tmp.nW5bu1PXC8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fQ525TY2ph ++ cat /tmp/tmp.nW5bu1PXC8 ++ rm /tmp/tmp.fQ525TY2ph /tmp/tmp.nW5bu1PXC8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jfIw3dsGX1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.lhXUMXgpFt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jfIw3dsGX1 ++ cat /tmp/tmp.lhXUMXgpFt ++ rm /tmp/tmp.jfIw3dsGX1 /tmp/tmp.lhXUMXgpFt ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9Wa9qEp4d6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Uo17POGOJU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9Wa9qEp4d6 ++ cat /tmp/tmp.Uo17POGOJU ++ rm /tmp/tmp.9Wa9qEp4d6 /tmp/tmp.Uo17POGOJU ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9W6Nzgcyi9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.IJMZYiPW7f ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9W6Nzgcyi9 ++ cat /tmp/tmp.IJMZYiPW7f ++ rm /tmp/tmp.9W6Nzgcyi9 /tmp/tmp.IJMZYiPW7f ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.kPxiHC56Sj ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.4sOwTrN3L3 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ 
break +++++ cat /tmp/tmp.kPxiHC56Sj +++++ cat /tmp/tmp.4sOwTrN3L3 +++++ rm /tmp/tmp.kPxiHC56Sj /tmp/tmp.4sOwTrN3L3 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.7b5pOTKmsZ ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.vyNrwP1zPe +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.7b5pOTKmsZ +++++ cat /tmp/tmp.vyNrwP1zPe +++++ rm /tmp/tmp.7b5pOTKmsZ /tmp/tmp.vyNrwP1zPe +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8iWIbAMoOs +++ mktemp ++ local LAST_ERR=/tmp/tmp.fHWmBJYiHE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8iWIbAMoOs ++ cat /tmp/tmp.fHWmBJYiHE ++ rm /tmp/tmp.8iWIbAMoOs /tmp/tmp.fHWmBJYiHE ++ return 0 + [[ 2 == \2 ]] + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.UDpupiOOGN ++ mktemp + local LAST_ERR=/tmp/tmp.H7ooGm7aEV + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.UDpupiOOGN secret/my-cluster-secrets-2 patched + cat /tmp/tmp.H7ooGm7aEV + rm /tmp/tmp.UDpupiOOGN /tmp/tmp.H7ooGm7aEV + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ma4ELxpJgA +++ mktemp ++ local LAST_ERR=/tmp/tmp.HaNavfVIUK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ma4ELxpJgA ++ cat /tmp/tmp.HaNavfVIUK ++ rm /tmp/tmp.ma4ELxpJgA /tmp/tmp.HaNavfVIUK ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jKkXVOF7s4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.qKb7Qf5fly ++ local exit_status=0 
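
The rotation step above shows the essence of patch_secret: base64-encode the new password and patch the matching data key of the Secret, then let the operator reconcile the change. A minimal standalone equivalent, assuming the Secret and key already exist; the names mirror the trace:

    # Rotate one key of a Kubernetes Secret in place.
    SECRET=my-cluster-secrets-2
    KEY=operator
    NEWPASS=test-password2
    ENCODED=$(echo -n "$NEWPASS" | base64)
    kubectl patch secret "$SECRET" -p "{\"data\":{\"$KEY\": \"$ENCODED\"}}"
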
+++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jKkXVOF7s4 ++ cat /tmp/tmp.qKb7Qf5fly ++ rm /tmp/tmp.jKkXVOF7s4 /tmp/tmp.qKb7Qf5fly ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fgl6eW12Up +++ mktemp ++ local LAST_ERR=/tmp/tmp.SAiF5LwRFo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fgl6eW12Up ++ cat /tmp/tmp.SAiF5LwRFo ++ rm /tmp/tmp.fgl6eW12Up /tmp/tmp.SAiF5LwRFo ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.IOEVuwzzyY ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ynAEdno07O +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.IOEVuwzzyY +++++ cat /tmp/tmp.ynAEdno07O +++++ rm /tmp/tmp.IOEVuwzzyY /tmp/tmp.ynAEdno07O +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.IERORsFzCT ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.hzOw1gutfQ +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.IERORsFzCT +++++ cat /tmp/tmp.hzOw1gutfQ +++++ rm /tmp/tmp.IERORsFzCT /tmp/tmp.hzOw1gutfQ +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.P2z5oUjo3i +++ mktemp ++ local LAST_ERR=/tmp/tmp.dGzSacPzT9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.P2z5oUjo3i ++ cat /tmp/tmp.dGzSacPzT9 ++ rm /tmp/tmp.P2z5oUjo3i /tmp/tmp.dGzSacPzT9 ++ return 0 + [[ 2 == \2 ]] + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 
'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kksIc4bkhl +++ mktemp ++ local LAST_ERR=/tmp/tmp.QRVtltsjpZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kksIc4bkhl ++ cat /tmp/tmp.QRVtltsjpZ ++ rm /tmp/tmp.kksIc4bkhl /tmp/tmp.QRVtltsjpZ ++ return 0 + client_pod=pxc-client-6644d8898f-btr4b + wait_pod pxc-client-6644d8898f-btr4b + local pod=pxc-client-6644d8898f-btr4b + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btr4b ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btr4b condition met pxc-client-6644d8898f-btr4b.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.kNkEqeqYPE/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.kNkEqeqYPE/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.FjBHPR92s6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.0vHRZhuqW4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FjBHPR92s6 ++ cat /tmp/tmp.0vHRZhuqW4 ++ rm /tmp/tmp.FjBHPR92s6 /tmp/tmp.0vHRZhuqW4 ++ return 0 + newpass='{iz~5ww4Zi,}zR>u,#' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''{iz~5ww4Zi,}zR>u,#'\'';' '-h some-name-pxc -uroot -p'\''{iz~5ww4Zi,}zR>u,#'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''{iz~5ww4Zi,}zR>u,#'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''{iz~5ww4Zi,}zR>u,#'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wGLRQ3LkiG +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZaihDUL6DA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wGLRQ3LkiG ++ cat /tmp/tmp.ZaihDUL6DA ++ rm /tmp/tmp.wGLRQ3LkiG /tmp/tmp.ZaihDUL6DA ++ return 0 + client_pod=pxc-client-6644d8898f-btr4b + wait_pod pxc-client-6644d8898f-btr4b + local pod=pxc-client-6644d8898f-btr4b + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btr4b ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btr4b condition met pxc-client-6644d8898f-btr4b.Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''{iz~5ww4Zi,}zR>u,#'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''{iz~5ww4Zi,}zR>u,#'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4.sql + [[ 
perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''{iz~5ww4Zi,}zR>u,#'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''{iz~5ww4Zi,}zR>u,#'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vOYo0Qj1tg +++ mktemp ++ local LAST_ERR=/tmp/tmp.wLyXNEP1PC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vOYo0Qj1tg ++ cat /tmp/tmp.wLyXNEP1PC ++ rm /tmp/tmp.vOYo0Qj1tg /tmp/tmp.wLyXNEP1PC ++ return 0 + client_pod=pxc-client-6644d8898f-btr4b + wait_pod pxc-client-6644d8898f-btr4b + local pod=pxc-client-6644d8898f-btr4b + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btr4b ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btr4b condition met pxc-client-6644d8898f-btr4b.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.kNkEqeqYPE/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.kNkEqeqYPE/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.jF5tsAMqKL +++ mktemp ++ local LAST_ERR=/tmp/tmp.4fdrV7fOgd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jF5tsAMqKL ++ cat /tmp/tmp.4fdrV7fOgd ++ rm /tmp/tmp.jF5tsAMqKL /tmp/tmp.4fdrV7fOgd ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.rnJpdDtqa5 ++ mktemp + local LAST_ERR=/tmp/tmp.ILV3J3Mh7V + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.rnJpdDtqa5 secret/my-cluster-secrets-2 configured + cat /tmp/tmp.ILV3J3Mh7V Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
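
The warning above is expected: my-cluster-secrets-2 was created and patched imperatively earlier in the test, so it lacks the kubectl.kubernetes.io/last-applied-configuration annotation that kubectl apply needs for its three-way merge, and kubectl patches the annotation in automatically. As the warning itself suggests, a resource meant for declarative management can record that annotation from the start; a sketch with a hypothetical manifest name:

    # Record last-applied-configuration at creation time so later
    # applies merge cleanly instead of emitting this warning.
    kubectl create -f secrets.yml --save-config
    # ...subsequent declarative updates:
    kubectl apply -f secrets.yml
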
+ rm /tmp/tmp.rnJpdDtqa5 /tmp/tmp.ILV3J3Mh7V + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FDh9eVPWBQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.lpT1lRV6yb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FDh9eVPWBQ ++ cat /tmp/tmp.lpT1lRV6yb ++ rm /tmp/tmp.FDh9eVPWBQ /tmp/tmp.lpT1lRV6yb ++ return 0 + client_pod=pxc-client-6644d8898f-btr4b + wait_pod pxc-client-6644d8898f-btr4b + local pod=pxc-client-6644d8898f-btr4b + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btr4b ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btr4b condition met pxc-client-6644d8898f-btr4b.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.kNkEqeqYPE/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.kNkEqeqYPE/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1721-8dedf6d8#' ++ mktemp + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + local LAST_OUT=/tmp/tmp.FM8l2pIWnJ ++ mktemp + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-18643~ + local LAST_ERR=/tmp/tmp.e2rWG6u77M + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.FM8l2pIWnJ perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.e2rWG6u77M + rm /tmp/tmp.FM8l2pIWnJ /tmp/tmp.e2rWG6u77M + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PWdGrFIB75 +++ mktemp ++ local LAST_ERR=/tmp/tmp.saZRy5EY2M ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PWdGrFIB75 ++ cat /tmp/tmp.saZRy5EY2M ++ rm /tmp/tmp.PWdGrFIB75 /tmp/tmp.saZRy5EY2M ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WIEplCSVrD +++ mktemp ++ local LAST_ERR=/tmp/tmp.RJfjRvoz0X ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WIEplCSVrD ++ cat /tmp/tmp.RJfjRvoz0X ++ rm /tmp/tmp.WIEplCSVrD /tmp/tmp.RJfjRvoz0X ++ return 0 + [[ 
initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5Wj4DM0TeZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.mHUhiEUKW4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5Wj4DM0TeZ ++ cat /tmp/tmp.mHUhiEUKW4 ++ rm /tmp/tmp.5Wj4DM0TeZ /tmp/tmp.mHUhiEUKW4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.klvEfvxyuF +++ mktemp ++ local LAST_ERR=/tmp/tmp.0xwrvJGT9j ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.klvEfvxyuF ++ cat /tmp/tmp.0xwrvJGT9j ++ rm /tmp/tmp.klvEfvxyuF /tmp/tmp.0xwrvJGT9j ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.l89JD31fon +++ mktemp ++ local LAST_ERR=/tmp/tmp.KC68KGggMg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.l89JD31fon ++ cat /tmp/tmp.KC68KGggMg ++ rm /tmp/tmp.l89JD31fon /tmp/tmp.KC68KGggMg ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.N7KL6PojNP +++ mktemp ++ local LAST_ERR=/tmp/tmp.1rgyAt7Crc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.N7KL6PojNP ++ cat /tmp/tmp.1rgyAt7Crc ++ rm /tmp/tmp.N7KL6PojNP /tmp/tmp.1rgyAt7Crc ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JFmDicI6rW +++ mktemp ++ local LAST_ERR=/tmp/tmp.UOSiggJujp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JFmDicI6rW ++ cat /tmp/tmp.UOSiggJujp ++ rm /tmp/tmp.JFmDicI6rW /tmp/tmp.UOSiggJujp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XHgfj0OVgV +++ mktemp ++ local LAST_ERR=/tmp/tmp.pqLpFXIFdB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XHgfj0OVgV ++ cat /tmp/tmp.pqLpFXIFdB ++ rm 
/tmp/tmp.XHgfj0OVgV /tmp/tmp.pqLpFXIFdB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 7 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IrRFkOsge8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.XQ4FA2Wp3d ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IrRFkOsge8 ++ cat /tmp/tmp.XQ4FA2Wp3d ++ rm /tmp/tmp.IrRFkOsge8 /tmp/tmp.XQ4FA2Wp3d ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 8 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4xVgF6KB3L +++ mktemp ++ local LAST_ERR=/tmp/tmp.QkFLR72tKi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4xVgF6KB3L ++ cat /tmp/tmp.QkFLR72tKi ++ rm /tmp/tmp.4xVgF6KB3L /tmp/tmp.QkFLR72tKi ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 9 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.T2KiwAplPo +++ mktemp ++ local LAST_ERR=/tmp/tmp.gqt1RpaMKb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.T2KiwAplPo ++ cat /tmp/tmp.gqt1RpaMKb ++ rm /tmp/tmp.T2KiwAplPo /tmp/tmp.gqt1RpaMKb ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 10 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1O4NKVSOgu +++ mktemp ++ local LAST_ERR=/tmp/tmp.TRm7jEzfz8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1O4NKVSOgu ++ cat /tmp/tmp.TRm7jEzfz8 ++ rm /tmp/tmp.1O4NKVSOgu /tmp/tmp.TRm7jEzfz8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 11 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aJJKHE9pLG +++ mktemp ++ local LAST_ERR=/tmp/tmp.zDQRekWR2R ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aJJKHE9pLG ++ cat /tmp/tmp.zDQRekWR2R ++ rm /tmp/tmp.aJJKHE9pLG /tmp/tmp.zDQRekWR2R ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 12 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VXMS7u3cKN +++ mktemp ++ local LAST_ERR=/tmp/tmp.WyGJBcVHBe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ 
cat /tmp/tmp.VXMS7u3cKN ++ cat /tmp/tmp.WyGJBcVHBe ++ rm /tmp/tmp.VXMS7u3cKN /tmp/tmp.WyGJBcVHBe ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.W9iAfMV3N1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.qaFsU6UKph ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.W9iAfMV3N1 ++ cat /tmp/tmp.qaFsU6UKph ++ rm /tmp/tmp.W9iAfMV3N1 /tmp/tmp.qaFsU6UKph ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.GIAG63kGfa ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.csmQE6HIf8 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.GIAG63kGfa +++++ cat /tmp/tmp.csmQE6HIf8 +++++ rm /tmp/tmp.GIAG63kGfa /tmp/tmp.csmQE6HIf8 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JdbfnQ8ZrG +++ mktemp ++ local LAST_ERR=/tmp/tmp.T96Xxy3CSy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JdbfnQ8ZrG ++ cat /tmp/tmp.T96Xxy3CSy ++ rm /tmp/tmp.JdbfnQ8ZrG /tmp/tmp.T96Xxy3CSy ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.5iGG22rUio ++ mktemp + local LAST_ERR=/tmp/tmp.gC63g2Ui9a + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.5iGG22rUio secret/my-cluster-secrets patched + cat /tmp/tmp.gC63g2Ui9a + rm /tmp/tmp.5iGG22rUio /tmp/tmp.gC63g2Ui9a + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kK1K3Dc1tZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.8zvBy87Cbr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kK1K3Dc1tZ ++ cat /tmp/tmp.8zvBy87Cbr ++ rm /tmp/tmp.kK1K3Dc1tZ /tmp/tmp.8zvBy87Cbr ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for 
cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ix4OlWdWwA +++ mktemp ++ local LAST_ERR=/tmp/tmp.EwyDfAUvYq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ix4OlWdWwA ++ cat /tmp/tmp.EwyDfAUvYq ++ rm /tmp/tmp.ix4OlWdWwA /tmp/tmp.EwyDfAUvYq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HGlfQUJmE8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.sb20ESeG16 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HGlfQUJmE8 ++ cat /tmp/tmp.sb20ESeG16 ++ rm /tmp/tmp.HGlfQUJmE8 /tmp/tmp.sb20ESeG16 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Qhb0M9HnA3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.49XegfbuaW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Qhb0M9HnA3 ++ cat /tmp/tmp.49XegfbuaW ++ rm /tmp/tmp.Qhb0M9HnA3 /tmp/tmp.49XegfbuaW ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LmW22DmdJt +++ mktemp ++ local LAST_ERR=/tmp/tmp.GgKcnx5vNE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LmW22DmdJt ++ cat /tmp/tmp.GgKcnx5vNE ++ rm /tmp/tmp.LmW22DmdJt /tmp/tmp.GgKcnx5vNE ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.zPwfSkrw8r ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.JmP3oE4XYP +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.zPwfSkrw8r +++++ cat /tmp/tmp.JmP3oE4XYP +++++ rm /tmp/tmp.zPwfSkrw8r /tmp/tmp.JmP3oE4XYP +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.q171r6JsZy +++ mktemp ++ local LAST_ERR=/tmp/tmp.jtLE0p72bj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.q171r6JsZy ++ cat /tmp/tmp.jtLE0p72bj ++ rm /tmp/tmp.q171r6JsZy /tmp/tmp.jtLE0p72bj ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h 
some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-3-80.sql ']' + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TjW0T8lPvf +++ mktemp ++ local LAST_ERR=/tmp/tmp.3DIPcAiKdJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TjW0T8lPvf ++ cat /tmp/tmp.3DIPcAiKdJ ++ rm /tmp/tmp.TjW0T8lPvf /tmp/tmp.3DIPcAiKdJ ++ return 0 + client_pod=pxc-client-6644d8898f-btr4b + wait_pod pxc-client-6644d8898f-btr4b + local pod=pxc-client-6644d8898f-btr4b + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-btr4b ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-btr4b condition met pxc-client-6644d8898f-btr4b.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.kNkEqeqYPE/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1721/e2e-tests/users/compare/select-3.sql /tmp/tmp.kNkEqeqYPE/select-3.sql + destroy users-18643 + local namespace=users-18643 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + grep -v 'the object has been modified' + grep -v 'get backup status: Job.batch' + sort -u + tee /tmp/tmp.kNkEqeqYPE/operator.log + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.N7BU2QRb09 +++ mktemp ++ local LAST_ERR=/tmp/tmp.cB35uQu8AB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.N7BU2QRb09 ++ cat /tmp/tmp.cB35uQu8AB ++ rm /tmp/tmp.N7BU2QRb09 /tmp/tmp.cB35uQu8AB ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-678dd8bcd4-vbnm5 ++ mktemp + local LAST_OUT=/tmp/tmp.pkerzVtCQB ++ mktemp + local LAST_ERR=/tmp/tmp.GvukVqX0Wn + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator 
percona-xtradb-cluster-operator-678dd8bcd4-vbnm5 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.pkerzVtCQB + cat /tmp/tmp.GvukVqX0Wn + rm /tmp/tmp.pkerzVtCQB /tmp/tmp.GvukVqX0Wn + return 0 2024-06-04T13:48:52.001Z INFO setup Manager starting up {"gitCommit": "8dedf6d8dd55c5dbdda7e2665ca8e0ce0546dc31", "gitBranch": "PR-1721-8dedf6d8", "buildTime": "2024-06-04T11:34:17Z", "goVersion": "go1.22.3", "os": "linux", "arch": "amd64"} 2024-06-04T13:48:52.001Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.26.15-gke.1381000"} 2024-06-04T13:48:52.002Z INFO setup Registering Components. 2024-06-04T13:48:56.303Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2024-06-04T13:48:56.429Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2024-06-04T13:48:56.429Z INFO controller-runtime.metrics Starting metrics server 2024-06-04T13:48:56.429Z INFO controller-runtime.webhook Starting webhook server 2024-06-04T13:48:56.429Z INFO setup Starting the Cmd. 2024-06-04T13:48:56.429Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2024-06-04T13:48:56.430Z INFO controller-runtime.certwatcher Starting certificate watcher 2024-06-04T13:48:56.430Z INFO controller-runtime.certwatcher Updated current TLS certificate 2024-06-04T13:48:56.430Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2024-06-04T13:48:56.631Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 2024-06-04T13:48:56.759Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2024-06-04T13:48:56.760Z DEBUG events percona-xtradb-cluster-operator-678dd8bcd4-vbnm5_71b7ae57-f5ae-47ab-8b3c-138b9d059f41 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"f62fb1bb-0a64-4a82-993a-7e770beafb0e","apiVersion":"coordination.k8s.io/v1","resourceVersion":"71577"}, "reason": "LeaderElection"} 2024-06-04T13:48:56.760Z INFO Starting Controller {"controller": "pxc-controller"} 2024-06-04T13:48:56.760Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2024-06-04T13:48:56.760Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: unknown type"} 2024-06-04T13:48:56.760Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: unknown type"} 2024-06-04T13:48:56.761Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2024-06-04T13:48:56.761Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: unknown type"} 2024-06-04T13:48:56.868Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2024-06-04T13:48:56.873Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2024-06-04T13:48:56.873Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2024-06-04T13:49:20.627Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "5c732776-b1b3-46e7-ad93-d7626e56753c", "version": "1.15.0"} 2024-06-04T13:50:40.555Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "8d458a8d-def4-4f6d-ad49-7a708707650e", "user": "operator"} 2024-06-04T13:50:40.607Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-18643", "name": 
"some-name", "reconcileID": "8d458a8d-def4-4f6d-ad49-7a708707650e", "user": "monitor"} 2024-06-04T13:50:40.714Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "8d458a8d-def4-4f6d-ad49-7a708707650e"} 2024-06-04T13:50:40.789Z INFO monitor user privileges granted {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "8d458a8d-def4-4f6d-ad49-7a708707650e"} 2024-06-04T13:50:40.951Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "8d458a8d-def4-4f6d-ad49-7a708707650e", "user": "xtrabackup"} 2024-06-04T13:50:41.062Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "8d458a8d-def4-4f6d-ad49-7a708707650e"} 2024-06-04T13:50:41.121Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "8d458a8d-def4-4f6d-ad49-7a708707650e", "user": "replication"} 2024-06-04T13:50:41.502Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "8d458a8d-def4-4f6d-ad49-7a708707650e", "err": "get primary pxc pod: not found"} 2024-06-04T13:50:45.666Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "8d330af0-5c30-45f5-85b1-a406770f39af", "err": "get primary pxc pod: not found"} 2024-06-04T13:50:50.869Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "53c3fe0b-3fed-42ab-b433-8165da17b06d", "err": "get primary pxc pod: not found"} 2024-06-04T13:50:56.450Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "47cc6df1-9510-4d0a-89a6-4e88d4c56817", "err": "get primary pxc pod: not found"} 2024-06-04T13:53:05.705Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "5b5d9688-ae67-4a82-9fc0-863d23faa07d", "user": "root"} 2024-06-04T13:53:06.064Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "5b5d9688-ae67-4a82-9fc0-863d23faa07d", "new version": "8.0.36-28.1"} 2024-06-04T13:53:09.707Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "5b5d9688-ae67-4a82-9fc0-863d23faa07d"} 2024-06-04T13:53:15.306Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "00dec1e9-42ab-4f7d-8a7a-9108881d25cc"} 2024-06-04T13:53:20.624Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "d7663cca-e755-435f-9efe-9b1b139cce80"} 2024-06-04T13:53:26.635Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "7a094010-4e73-4dde-a266-a396c3fb1ff0"} 2024-06-04T13:53:31.728Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "b7e21d83-5ebb-4c3b-9778-1091bdc81ec1"} 2024-06-04T13:53:37.110Z 
DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6cf866f8-09b7-49d6-91d8-4ed8097698a4"} 2024-06-04T13:53:42.734Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "66da822b-a6da-4065-b0ba-bd2f998c916e"} 2024-06-04T13:53:48.402Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "55685659-d440-4a34-995e-5adbb2efe05a"} 2024-06-04T13:53:53.701Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "9d30551e-2f66-4828-a4b5-e319f8d521dd"} 2024-06-04T13:53:58.925Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "b7cd241a-8b39-4f3e-a278-d838c6e39853"} 2024-06-04T13:54:05.433Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6c9edd3e-d7a8-45f8-896d-5fc0ce3b96ff"} 2024-06-04T13:54:10.550Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "5a78abe8-3b0c-4357-ac94-a01f6b6f1f30"} 2024-06-04T13:54:12.536Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "f53b98f4-7820-4da3-8081-aa2edf4b5d99", "user": "root"} 2024-06-04T13:54:12.603Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "f53b98f4-7820-4da3-8081-aa2edf4b5d99", "user": "root"} 2024-06-04T13:54:12.614Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "f53b98f4-7820-4da3-8081-aa2edf4b5d99", "secret": "some-name-mysql-init", "user": "root"} 2024-06-04T13:54:18.031Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "f53b98f4-7820-4da3-8081-aa2edf4b5d99"} 2024-06-04T13:54:18.045Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "f53b98f4-7820-4da3-8081-aa2edf4b5d99", "user": "root"} 2024-06-04T13:54:18.106Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "f53b98f4-7820-4da3-8081-aa2edf4b5d99", "user": "root"} 2024-06-04T13:54:21.811Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "f53b98f4-7820-4da3-8081-aa2edf4b5d99"} 2024-06-04T13:54:27.841Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "3b3e30c2-ea5a-4c90-890a-2583afb75c4b"} 2024-06-04T13:54:32.539Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "13dcae9f-d3c4-4d8a-ace3-b5d271049b11"} 2024-06-04T13:54:55.103Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "a3a262c0-993c-48ff-8af8-c5843a73d50c", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been 
configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-04T13:54:56.652Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "5ada3999-7aad-42fd-8940-47bdaec2e8f2", "err": "get primary pxc pod: not found"} 2024-06-04T13:55:00.406Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "5ada3999-7aad-42fd-8940-47bdaec2e8f2", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-04T13:55:01.950Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "0373b4ad-43cd-4515-a504-cbe18b6a4505", "user": "proxyadmin"} 2024-06-04T13:55:01.950Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "0373b4ad-43cd-4515-a504-cbe18b6a4505", "user": "proxyadmin"} 2024-06-04T13:55:02.026Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "0373b4ad-43cd-4515-a504-cbe18b6a4505", "user": "proxyadmin"} 2024-06-04T13:55:02.043Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "0373b4ad-43cd-4515-a504-cbe18b6a4505", "user": "proxyadmin"} 2024-06-04T13:55:02.043Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "0373b4ad-43cd-4515-a504-cbe18b6a4505", "last-applied-secret": "1a55867d0f4fa0d0643653b2eb57b2f690315d8d843a10599c9839c2a779a95f"} 2024-06-04T13:55:02.223Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "0373b4ad-43cd-4515-a504-cbe18b6a4505", "err": "get primary pxc pod: not found"} 2024-06-04T13:55:02.282Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": 
"0373b4ad-43cd-4515-a504-cbe18b6a4505", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-04T13:55:24.268Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "110fd6e2-3996-475e-87d4-42589eed37c9", "err": "get primary pxc pod: not found"} 2024-06-04T13:55:29.761Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "8788b344-92b3-4110-909f-ef049e41179a", "err": "get primary pxc pod: not found"} 2024-06-04T13:55:45.901Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "9c37bac3-8248-41b8-8fd3-1e5de9b1ebcc"} 2024-06-04T13:55:52.437Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "d2363261-2592-40a2-9362-d3cba5913ba6"} 2024-06-04T13:55:55.874Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6dbc5b8d-5043-4a14-ac3c-59d42b17fa42", "user": "xtrabackup"} 2024-06-04T13:55:55.953Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6dbc5b8d-5043-4a14-ac3c-59d42b17fa42", "user": "xtrabackup"} 2024-06-04T13:55:56.074Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6dbc5b8d-5043-4a14-ac3c-59d42b17fa42", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-04T13:55:56.170Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6dbc5b8d-5043-4a14-ac3c-59d42b17fa42", "user": "xtrabackup"} 2024-06-04T13:55:56.197Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6dbc5b8d-5043-4a14-ac3c-59d42b17fa42", "user": "xtrabackup"} 2024-06-04T13:55:56.217Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6dbc5b8d-5043-4a14-ac3c-59d42b17fa42", "last-applied-secret": "d11bcc70ca53ea410cd455e13d82e7225917b0861d9b677ed34c02c5f6fbaf1b"} 2024-06-04T13:56:02.425Z DEBUG 
PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6dbc5b8d-5043-4a14-ac3c-59d42b17fa42"} 2024-06-04T13:56:54.052Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "7d0f1554-1166-455a-96d0-c3e37505d043", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-18643 on 10.174.224.10:53: no such host"} 2024-06-04T13:57:04.366Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "15b223ee-f813-4bf8-8588-65b1000cc5ba", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-18643 on 10.174.224.10:53: no such host"} 2024-06-04T13:57:37.549Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "f06bd505-4a10-4081-9762-168630248ceb", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-18643 on 10.174.224.10:53: no such host"} 2024-06-04T13:57:38.023Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "f910db2b-be8b-4353-901a-f9be2f8c1a15", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-18643 on 10.174.224.10:53: no such host"} 2024-06-04T13:57:42.988Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "bce8d142-3e72-4a49-a19c-753d2662e785", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-18643 on 10.174.224.10:53: no such host"} 2024-06-04T13:57:48.574Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c974cbcc-f389-43ee-8eb9-7f235b6972b6", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-18643 on 10.174.224.10:53: no such host"} 2024-06-04T13:57:53.815Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "eb469cbc-22dd-4ec4-a4ef-89b4b2fc9d53", "primary name": "some-name-pxc-0.some-name-pxc.users-18643.svc.cluster.local"} 2024-06-04T13:57:59.034Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "206bc55e-a51f-4bbf-8b87-d613702c57f2", "primary name": "some-name-pxc-0.some-name-pxc.users-18643.svc.cluster.local"} 2024-06-04T13:58:04.297Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "31d53a3c-5f4d-4bc3-b33f-036e48bfbb2a", "primary name": "some-name-pxc-0.some-name-pxc.users-18643.svc.cluster.local"} 2024-06-04T13:58:09.600Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "a54b9562-42a7-4eb8-9477-a492bb1b569c", "primary name": "some-name-pxc-0.some-name-pxc.users-18643.svc.cluster.local"} 2024-06-04T13:58:14.822Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "ab600410-2ad9-4089-88ce-b2bcc044eec8", "primary name": "some-name-pxc-0.some-name-pxc.users-18643.svc.cluster.local"} 2024-06-04T13:58:30.020Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "85b539af-4f38-4315-bb21-fd84b22045e4"} 2024-06-04T13:58:34.907Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "8e507c7a-bf67-42d3-994f-98628cebc883"} 2024-06-04T13:58:40.207Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "1eb0c6f7-4a2e-486f-87e9-94e710261868"} 2024-06-04T13:58:45.631Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "0258f1d3-e8ed-4957-9918-b2903162c73b"} 2024-06-04T13:58:47.881Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "4a56247d-9615-4725-a90d-6a5738d4cbdd", "user": "monitor"} 2024-06-04T13:58:47.920Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "4a56247d-9615-4725-a90d-6a5738d4cbdd", "user": "monitor"} 2024-06-04T13:58:47.931Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "4a56247d-9615-4725-a90d-6a5738d4cbdd", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-04T13:58:47.977Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "4a56247d-9615-4725-a90d-6a5738d4cbdd", "user": "monitor"} 2024-06-04T13:58:47.992Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "4a56247d-9615-4725-a90d-6a5738d4cbdd", "user": "monitor"} 2024-06-04T13:58:48.094Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "4a56247d-9615-4725-a90d-6a5738d4cbdd", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"} 2024-06-04T13:58:50.500Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "4a56247d-9615-4725-a90d-6a5738d4cbdd", "error": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / 
\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"}
2024-06-04T13:59:38.826Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "33d28896-0c53-410b-8873-3797a2e3a9a8", "user": "monitor"}
2024-06-04T13:59:42.166Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "33d28896-0c53-410b-8873-3797a2e3a9a8"}
2024-06-04T13:59:43.810Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "5e0b95c6-189c-4c3f-bc01-9b9fac8bf6d3", "user": "monitor"}
2024-06-04T13:59:44.124Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "5e0b95c6-189c-4c3f-bc01-9b9fac8bf6d3", "user": "monitor"}
2024-06-04T13:59:44.156Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "5e0b95c6-189c-4c3f-bc01-9b9fac8bf6d3", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"}
2024-06-04T13:59:47.375Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "5e0b95c6-189c-4c3f-bc01-9b9fac8bf6d3"}
2024-06-04T13:59:52.701Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "1fc2a267-6efa-4ad2-a40f-aa9691eb2962"}
2024-06-04T13:59:58.104Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "67b28960-ae22-450b-9f92-4201c4be2354"}
2024-06-04T14:00:03.791Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "bb36ac54-cf86-4cf0-8cf4-1b02f02bd0d2"}
2024-06-04T14:00:09.113Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "7b52eb99-03d3-43bd-9995-4baac9c73efe"}
2024-06-04T14:00:11.580Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "691c34c4-1ea9-4cb1-a409-78925b24c249", "user": "operator"}
2024-06-04T14:00:11.618Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "691c34c4-1ea9-4cb1-a409-78925b24c249", "user": "operator"}
2024-06-04T14:00:11.631Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "691c34c4-1ea9-4cb1-a409-78925b24c249", "secret": "some-name-mysql-init", "user": "operator"}
2024-06-04T14:00:11.646Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "691c34c4-1ea9-4cb1-a409-78925b24c249", "user": "operator"}
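
Read in sequence, the monitor entries above show the operator's full rotation flow for a system user: Password changed, Password updated, MySQL init secret updated, Internal secrets updated, Old password discarded, and finally Proxy pods will be restarted with a new last-applied-secret value. On the test side the rotation is driven by patch_secret and then gated on wait_cluster_consistency, both visible earlier in this trace. A minimal sketch under the assumption that they reduce to plain kubectl calls; wait_cluster_ready is a hypothetical simplification, since the real helper also compares .status.pxc.ready and .status.haproxy.ready against the expected sizes (the `[[ 3 == \3 ]]` checks above).

# Sketch only: patch_secret as used in this trace (value must already be base64-encoded).
patch_secret() {
    local secret=$1 key=$2 value=$3
    kubectl patch secret "$secret" -p="{\"data\":{\"${key}\": \"${value}\"}}"
}

# Sketch only: a bounded readiness poll in the spirit of wait_cluster_consistency.
wait_cluster_ready() {
    local cluster=$1 i=0 max=36
    until [[ $(kubectl get pxc "$cluster" -o 'jsonpath={.status.state}') == ready ]]; do
        [[ $i -ge $max ]] && { echo "cluster $cluster never became ready" >&2; return 1; }
        i=$((i + 1))
        sleep 20
    done
}

# Usage mirroring the trace: rotate the monitor password, then wait for the rotation to settle.
# patch_secret my-cluster-secrets monitor "$(echo -n test-password2 | base64)"
# wait_cluster_ready some-name
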
2024-06-04T14:00:11.671Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "691c34c4-1ea9-4cb1-a409-78925b24c249", "user": "operator"} 2024-06-04T14:00:11.727Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "691c34c4-1ea9-4cb1-a409-78925b24c249", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-06-04T14:00:13.052Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "691c34c4-1ea9-4cb1-a409-78925b24c249", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18643.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18643.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18643.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18643.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18643.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18643.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-04T14:00:50.199Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "06c2c7d8-9da5-4333-881a-080fcd32b608"} 2024-06-04T14:00:58.713Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "80832c76-814a-44db-a548-2864b64e79ff"} 2024-06-04T14:01:05.168Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "3194d6e7-9e81-4e3c-8d0d-1002bd082dfa"} 2024-06-04T14:01:10.801Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "96b8e7f1-b2eb-4547-b3ba-61eb9d141206"} 2024-06-04T14:01:11.570Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350", "secrets": "my-cluster-secrets-2"} 2024-06-04T14:01:11.582Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350", "user": 
"root"} 2024-06-04T14:01:11.637Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350", "user": "root"} 2024-06-04T14:01:11.653Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350", "secret": "some-name-mysql-init", "user": "root"} 2024-06-04T14:01:17.111Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350"} 2024-06-04T14:01:17.123Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350", "user": "root"} 2024-06-04T14:01:17.183Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350", "user": "root"} 2024-06-04T14:01:17.200Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350", "user": "operator"} 2024-06-04T14:01:17.240Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350", "user": "operator"} 2024-06-04T14:01:17.249Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350", "secret": "some-name-mysql-init", "user": "operator"} 2024-06-04T14:01:17.263Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350", "user": "operator"} 2024-06-04T14:01:17.288Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350", "user": "operator"} 2024-06-04T14:01:17.299Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350", "user": "monitor"} 2024-06-04T14:01:17.321Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350", "user": "monitor"} 2024-06-04T14:01:17.331Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-04T14:01:17.383Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350", "user": "monitor"} 2024-06-04T14:01:17.396Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350", "user": "monitor"} 2024-06-04T14:01:17.536Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350", "user": "xtrabackup"} 2024-06-04T14:01:17.574Z INFO Password updated {"controller": "pxc-controller", "namespace": 
"users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350", "user": "xtrabackup"} 2024-06-04T14:01:17.590Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-04T14:01:17.606Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350", "user": "xtrabackup"} 2024-06-04T14:01:17.640Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350", "user": "xtrabackup"} 2024-06-04T14:01:17.652Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350", "user": "replication"} 2024-06-04T14:01:17.692Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350", "user": "replication"} 2024-06-04T14:01:17.702Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350", "secret": "some-name-mysql-init", "user": "replication"} 2024-06-04T14:01:17.715Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350", "user": "replication"} 2024-06-04T14:01:17.750Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350", "user": "replication"} 2024-06-04T14:01:17.750Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350", "user": "proxyadmin"} 2024-06-04T14:01:17.810Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350", "user": "proxyadmin"} 2024-06-04T14:01:17.826Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350", "user": "proxyadmin"} 2024-06-04T14:01:17.826Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350", "last-applied-secret": "dc7489466159f123f26fa18d424b76ac5ee3b9d37d050bef8dcc586c13ceb7f7"} 2024-06-04T14:01:17.826Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350", "last-applied-secret": "dc7489466159f123f26fa18d424b76ac5ee3b9d37d050bef8dcc586c13ceb7f7"} 2024-06-04T14:01:18.085Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "c3aa7cdd-91ed-479e-8809-66673e566350", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-04T14:02:18.332Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "b48170b5-897e-43c9-bbee-f0b5c7b320fa", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp 10.174.216.34:33062: connect: connection refused"} 2024-06-04T14:03:16.841Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "5ef058c9-7255-445b-8979-14dc7a64d1d0", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-18643 on 10.174.224.10:53: no such host"} 2024-06-04T14:03:22.064Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "5478ff16-9049-4ec0-a05b-0fef4430ee64", "primary name": "some-name-pxc-0.some-name-pxc.users-18643.svc.cluster.local"} 2024-06-04T14:03:27.291Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "a2738a92-20c3-4827-af6e-88f76e7a5be3", "primary name": "some-name-pxc-0.some-name-pxc.users-18643.svc.cluster.local"} 2024-06-04T14:03:37.892Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "62379fd5-2771-4643-b0c4-d081e0a8c986", "primary name": "some-name-pxc-0.some-name-pxc.users-18643.svc.cluster.local"} 2024-06-04T14:03:43.182Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "3604908a-ba16-49b0-8b52-27392738ceeb", "primary name": "some-name-pxc-0.some-name-pxc.users-18643.svc.cluster.local"} 2024-06-04T14:03:53.633Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "0e7bd181-9974-4887-8ceb-ea725b3e442f", "primary name": "some-name-pxc-0.some-name-pxc.users-18643.svc.cluster.local"} 2024-06-04T14:03:59.227Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "8c84501e-e926-4e45-8a41-5583771194a7", "user": "monitor"} 2024-06-04T14:03:59.520Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "8c84501e-e926-4e45-8a41-5583771194a7", "user": "monitor"} 2024-06-04T14:03:59.552Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "8c84501e-e926-4e45-8a41-5583771194a7", "last-applied-secret": "dc7489466159f123f26fa18d424b76ac5ee3b9d37d050bef8dcc586c13ceb7f7"} 2024-06-04T14:04:03.280Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "8c84501e-e926-4e45-8a41-5583771194a7"} 2024-06-04T14:04:07.581Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "5a168913-885f-4c53-84ff-0fca1ebc7f1e"} 2024-06-04T14:04:12.990Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "444bdb3a-4f35-482b-a99a-1a748a0e868c"} 2024-06-04T14:04:18.407Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "3b14daf8-e07b-447f-8010-8d0cc137c13f"} 2024-06-04T14:04:20.730Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "2d600889-2745-49ba-b2d9-689e8dba4eb0", "user": "operator"} 2024-06-04T14:04:20.768Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "2d600889-2745-49ba-b2d9-689e8dba4eb0", "user": "operator"} 2024-06-04T14:04:20.781Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "2d600889-2745-49ba-b2d9-689e8dba4eb0", "secret": "some-name-mysql-init", "user": "operator"} 2024-06-04T14:04:20.795Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "2d600889-2745-49ba-b2d9-689e8dba4eb0", "user": "operator"} 2024-06-04T14:04:20.830Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "2d600889-2745-49ba-b2d9-689e8dba4eb0", "user": "operator"} 2024-06-04T14:04:20.879Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "2d600889-2745-49ba-b2d9-689e8dba4eb0", "last-applied-secret": "2c44540f5d1256789b6c6ddbd646189ffa22c7b0129dd9fb94d0c084cf843649"} 2024-06-04T14:04:22.163Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "2d600889-2745-49ba-b2d9-689e8dba4eb0", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18643.svc.' 
(using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18643.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18643.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18643.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18643.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-18643.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-04T14:04:48.028Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "d34aef9e-8a95-499d-8f5e-e0dd22f04d17"} 2024-06-04T14:04:51.779Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "23168735-f6ef-43ff-859e-d1612b79226b"} 2024-06-04T14:04:57.901Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "9ac3f530-4e48-44a2-a146-0a1e823bcb6c"} 2024-06-04T14:05:02.184Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "cf54cb7f-f288-4506-aaf2-82b716040b4c"} 2024-06-04T14:05:07.505Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "30f2cbea-e6d2-48f0-a0fe-e39a28e26ae0"} 2024-06-04T14:05:13.307Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "0e9c64e7-f5f5-4716-9d93-4f97accfc91f"} 2024-06-04T14:05:18.464Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6d48dac4-0579-496d-bd1d-d098cac1f2a3"} 2024-06-04T14:05:23.868Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "430bfc87-274c-4325-9b9c-c96ae83b9204"} 2024-06-04T14:05:29.959Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "7fbd886d-e727-4087-be84-d7a4afa578aa"} 2024-06-04T14:05:36.755Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "a7784b6c-89d1-472d-becb-e99c5d214bdb"} 2024-06-04T14:05:40.718Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", 
"namespace": "users-18643", "name": "some-name", "reconcileID": "0f2080cc-d6f3-4e69-b407-2435242aa3e7"} 2024-06-04T14:05:46.161Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "4033f268-a442-43fd-a7a6-536d0c92083f"} 2024-06-04T14:05:51.659Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "938b3157-02e0-48d9-8a59-b2e3e28afed5"} 2024-06-04T14:05:57.659Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "5943ea65-14a1-430d-a98a-df2f6a4e3271"} 2024-06-04T14:06:02.497Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "e0914b7f-4967-42d3-9c94-3d3159b755df"} 2024-06-04T14:06:07.798Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "888da1c8-256b-4806-9277-ffcf8cd07839"} 2024-06-04T14:06:13.359Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6ba3420d-9358-4dfb-aa81-1e02ee631c62"} 2024-06-04T14:06:18.996Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "0fb5b28e-f668-482e-b3f4-bd3ef2035eb5"} 2024-06-04T14:06:21.267Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "07f6d09f-35a0-4d7b-a3c6-ae33f441536a", "user": "root"} 2024-06-04T14:06:21.324Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "07f6d09f-35a0-4d7b-a3c6-ae33f441536a", "user": "root"} 2024-06-04T14:06:21.333Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "07f6d09f-35a0-4d7b-a3c6-ae33f441536a", "secret": "some-name-mysql-init", "user": "root"} 2024-06-04T14:06:26.134Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "07f6d09f-35a0-4d7b-a3c6-ae33f441536a"} 2024-06-04T14:06:26.239Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "07f6d09f-35a0-4d7b-a3c6-ae33f441536a", "user": "root"} 2024-06-04T14:06:26.297Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "07f6d09f-35a0-4d7b-a3c6-ae33f441536a", "user": "root"} 2024-06-04T14:06:26.330Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "07f6d09f-35a0-4d7b-a3c6-ae33f441536a", "user": "monitor"} 2024-06-04T14:06:26.354Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "07f6d09f-35a0-4d7b-a3c6-ae33f441536a", "user": "monitor"} 2024-06-04T14:06:26.433Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "07f6d09f-35a0-4d7b-a3c6-ae33f441536a", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-04T14:06:26.486Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-18643", "name": 
"some-name", "reconcileID": "07f6d09f-35a0-4d7b-a3c6-ae33f441536a", "user": "monitor"} 2024-06-04T14:06:26.590Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "07f6d09f-35a0-4d7b-a3c6-ae33f441536a", "user": "monitor"} 2024-06-04T14:06:26.846Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "07f6d09f-35a0-4d7b-a3c6-ae33f441536a", "user": "xtrabackup"} 2024-06-04T14:06:26.885Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "07f6d09f-35a0-4d7b-a3c6-ae33f441536a", "user": "xtrabackup"} 2024-06-04T14:06:26.913Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "07f6d09f-35a0-4d7b-a3c6-ae33f441536a", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-04T14:06:26.942Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "07f6d09f-35a0-4d7b-a3c6-ae33f441536a", "user": "xtrabackup"} 2024-06-04T14:06:26.979Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "07f6d09f-35a0-4d7b-a3c6-ae33f441536a", "user": "xtrabackup"} 2024-06-04T14:06:26.994Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "07f6d09f-35a0-4d7b-a3c6-ae33f441536a", "user": "proxyadmin"} 2024-06-04T14:06:27.047Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "07f6d09f-35a0-4d7b-a3c6-ae33f441536a", "user": "proxyadmin"} 2024-06-04T14:06:27.065Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "07f6d09f-35a0-4d7b-a3c6-ae33f441536a", "user": "proxyadmin"} 2024-06-04T14:06:27.065Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "07f6d09f-35a0-4d7b-a3c6-ae33f441536a", "last-applied-secret": "fde358ee0c74e62ccf204aa906a280093e59d3cf42b9b35236040e257fcc7afc"} 2024-06-04T14:06:27.065Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "07f6d09f-35a0-4d7b-a3c6-ae33f441536a", "last-applied-secret": "fde358ee0c74e62ccf204aa906a280093e59d3cf42b9b35236040e257fcc7afc"} 2024-06-04T14:06:27.279Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "07f6d09f-35a0-4d7b-a3c6-ae33f441536a", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-04T14:06:38.560Z INFO KubeAPIWarningLogger .metadata.ownerReferences contains duplicate entries; API server dedups owner references in 1.20+, and may reject such requests as early as 1.24; please fix your requests; duplicate UID(s) observed: b36c9bb7-02dd-4dd0-b872-fb742085dbe2 2024-06-04T14:07:33.549Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "e6e1bce5-58b8-4f60-b0dc-f788bc224480", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp 10.174.216.38:33062: connect: connection refused"} 2024-06-04T14:07:33.830Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "770f748d-a62d-46a7-a3fc-ab077b9a87b6", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp 10.174.216.38:33062: connect: connection refused"} 2024-06-04T14:08:16.812Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "beedbdc0-26c5-441b-b8f7-88cccc563473", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-18643 on 10.174.224.10:53: no such host"} 2024-06-04T14:08:35.813Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "729c3653-beee-43d3-97f8-24d33061fca4", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-18643 on 10.174.224.10:53: no such host"} 2024-06-04T14:09:02.504Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6efb88e8-7608-45ca-8322-5fb6a40bdeb9", "user": "root"} 2024-06-04T14:09:02.555Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6efb88e8-7608-45ca-8322-5fb6a40bdeb9", "user": "root"} 2024-06-04T14:09:02.568Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6efb88e8-7608-45ca-8322-5fb6a40bdeb9", "secret": "some-name-mysql-init", "user": "root"} 2024-06-04T14:09:02.582Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6efb88e8-7608-45ca-8322-5fb6a40bdeb9", "user": "root"} 2024-06-04T14:09:02.632Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6efb88e8-7608-45ca-8322-5fb6a40bdeb9", "user": "root"} 2024-06-04T14:09:02.647Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": 
"6efb88e8-7608-45ca-8322-5fb6a40bdeb9", "user": "operator"} 2024-06-04T14:09:02.670Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6efb88e8-7608-45ca-8322-5fb6a40bdeb9", "user": "operator"} 2024-06-04T14:09:02.683Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6efb88e8-7608-45ca-8322-5fb6a40bdeb9", "secret": "some-name-mysql-init", "user": "operator"} 2024-06-04T14:09:02.693Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6efb88e8-7608-45ca-8322-5fb6a40bdeb9", "user": "operator"} 2024-06-04T14:09:02.725Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6efb88e8-7608-45ca-8322-5fb6a40bdeb9", "user": "operator"} 2024-06-04T14:09:02.740Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6efb88e8-7608-45ca-8322-5fb6a40bdeb9", "user": "monitor"} 2024-06-04T14:09:02.779Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6efb88e8-7608-45ca-8322-5fb6a40bdeb9", "user": "monitor"} 2024-06-04T14:09:02.798Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6efb88e8-7608-45ca-8322-5fb6a40bdeb9", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-04T14:09:02.813Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6efb88e8-7608-45ca-8322-5fb6a40bdeb9", "user": "monitor"} 2024-06-04T14:09:02.961Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6efb88e8-7608-45ca-8322-5fb6a40bdeb9", "user": "xtrabackup"} 2024-06-04T14:09:02.996Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6efb88e8-7608-45ca-8322-5fb6a40bdeb9", "user": "xtrabackup"} 2024-06-04T14:09:03.010Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6efb88e8-7608-45ca-8322-5fb6a40bdeb9", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-04T14:09:03.020Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6efb88e8-7608-45ca-8322-5fb6a40bdeb9", "user": "xtrabackup"} 2024-06-04T14:09:03.049Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6efb88e8-7608-45ca-8322-5fb6a40bdeb9", "user": "xtrabackup"} 2024-06-04T14:09:03.066Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6efb88e8-7608-45ca-8322-5fb6a40bdeb9", "user": "replication"} 2024-06-04T14:09:03.097Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6efb88e8-7608-45ca-8322-5fb6a40bdeb9", "user": "replication"} 2024-06-04T14:09:03.110Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6efb88e8-7608-45ca-8322-5fb6a40bdeb9", "secret": 
"some-name-mysql-init", "user": "replication"} 2024-06-04T14:09:03.120Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6efb88e8-7608-45ca-8322-5fb6a40bdeb9", "user": "replication"} 2024-06-04T14:09:03.156Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6efb88e8-7608-45ca-8322-5fb6a40bdeb9", "user": "replication"} 2024-06-04T14:09:03.156Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6efb88e8-7608-45ca-8322-5fb6a40bdeb9", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-06-04T14:09:03.156Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "6efb88e8-7608-45ca-8322-5fb6a40bdeb9", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-06-04T14:10:32.868Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "3247ed97-3b22-4b04-a0ab-dd5f875a13c9", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-18643 on 10.174.224.10:53: no such host"} 2024-06-04T14:10:33.201Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "d4ea9ba5-a024-433f-a7c9-a846fe8cfa69", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-18643 on 10.174.224.10:53: no such host"} 2024-06-04T14:10:38.167Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "bdefae84-f065-46f7-b36d-67e9840dfc0b", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-18643 on 10.174.224.10:53: no such host"} 2024-06-04T14:10:58.603Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "cee44a9c-51ac-49a9-968c-93a05817ba0d", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-18643 on 10.174.224.10:53: no such host"} 2024-06-04T14:11:20.032Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "4646cb92-f6ba-4539-a9fc-b90c47fe6703", "user": "monitor"} 2024-06-04T14:11:20.411Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "4646cb92-f6ba-4539-a9fc-b90c47fe6703", "user": "monitor"} 2024-06-04T14:11:20.435Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "4646cb92-f6ba-4539-a9fc-b90c47fe6703", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-06-04T14:11:36.718Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "95b8bdfe-3b9a-405f-956b-c03b3d81cdad", "user": "monitor"} 2024-06-04T14:11:36.753Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", 
"reconcileID": "95b8bdfe-3b9a-405f-956b-c03b3d81cdad", "user": "monitor"} 2024-06-04T14:11:36.768Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "95b8bdfe-3b9a-405f-956b-c03b3d81cdad", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-04T14:11:36.781Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "95b8bdfe-3b9a-405f-956b-c03b3d81cdad", "user": "monitor"} 2024-06-04T14:11:36.915Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "95b8bdfe-3b9a-405f-956b-c03b3d81cdad", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"} 2024-06-04T14:12:47.614Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "3a291711-b6e2-4a3b-903a-f376dc1b9241", "user": "monitor"} 2024-06-04T14:12:47.910Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "3a291711-b6e2-4a3b-903a-f376dc1b9241", "user": "monitor"} 2024-06-04T14:12:47.938Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-18643", "name": "some-name", "reconcileID": "3a291711-b6e2-4a3b-903a-f376dc1b9241", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"} github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:222 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:261 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:324 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1248 [mysql] 2024/06/04 13:57:48 packets.go:37: read tcp 10.174.217.5:43576->10.174.218.68:33062: read: connection reset by peer [mysql] 2024/06/04 14:07:33 packets.go:37: read tcp 10.174.217.5:52176->10.174.216.38:33062: read: connection reset by peer [mysql] 2024/06/04 14:08:35 packets.go:37: unexpected EOF [mysql] 2024/06/04 14:10:58 packets.go:37: read tcp 10.174.217.5:58464->10.174.228.225:3306: i/o timeout sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).processNextWorkItem sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).reconcileHandler sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Start.func2.2 + grep -v NAMESPACE + kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-18643 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.X97aOPS8af ++ mktemp + local LAST_ERR=/tmp/tmp.qDVfU8k1Uy + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.X97aOPS8af perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.qDVfU8k1Uy + rm /tmp/tmp.X97aOPS8af /tmp/tmp.qDVfU8k1Uy + return 0 + kubectl_bin delete pxc-backup --all 
--all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.AAjRamR3Dk ++ mktemp + local LAST_ERR=/tmp/tmp.jf77BqZsJq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.AAjRamR3Dk No resources found + cat /tmp/tmp.jf77BqZsJq + rm /tmp/tmp.AAjRamR3Dk /tmp/tmp.jf77BqZsJq + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.lSRfl7kva0 ++ mktemp + local LAST_ERR=/tmp/tmp.o2uIV8otre + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lSRfl7kva0 No resources found + cat /tmp/tmp.o2uIV8otre + rm /tmp/tmp.lSRfl7kva0 /tmp/tmp.o2uIV8otre + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.roYPyo0iIW ++ mktemp + local LAST_ERR=/tmp/tmp.L3RwOoFlsI + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.roYPyo0iIW validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.L3RwOoFlsI + rm /tmp/tmp.roYPyo0iIW /tmp/tmp.L3RwOoFlsI + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace users-18643 + rm -rf /tmp/tmp.kNkEqeqYPE + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp ++ mktemp + desc 'test passed' + local LAST_OUT=/tmp/tmp.mRbbProFDh + set +o xtrace ----------------------------------------------------------------------------------- test passed ----------------------------------------------------------------------------------- + local LAST_OUT=/tmp/tmp.zNQwCdErb3 ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.EmN42A855O + local exit_status=0 + local LAST_ERR=/tmp/tmp.7JnEJrlK6k + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace users-18643
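
Editor's note: the repeated mktemp/seq/set +e sequences in the trace above are the expansion of the test suite's kubectl_bin shell wrapper, which retries a kubectl command up to three times while capturing stdout and stderr in temp files. Below is a minimal sketch reconstructed only from the +/++ trace lines in this log; the real helper in the repo's e2e-tests scripts may differ, and the sleep between failed attempts is an assumption (the trace here only shows successful first attempts).

kubectl_bin() {
	local LAST_OUT
	local LAST_ERR
	LAST_OUT=$(mktemp)
	LAST_ERR=$(mktemp)
	local exit_status=0
	for i in $(seq 0 2); do
		# Disable errexit so a failed kubectl doesn't abort the whole test run.
		set +e
		kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
		exit_status=$?
		set -e
		if [ "$exit_status" != 0 ]; then
			sleep 1   # assumed backoff; not visible in this trace
			continue
		fi
		break
	done
	# Replay captured output so callers (and the log) still see it.
	cat "$LAST_OUT"
	cat "$LAST_ERR" >&2
	rm "$LAST_OUT" "$LAST_ERR"
	return $exit_status
}

For example, "kubectl_bin delete pxc --all --all-namespaces" in the cleanup above runs the delete, prints "perconaxtradbcluster.pxc.percona.com \"some-name\" deleted" from the captured stdout file, and returns 0 on the first attempt, which is exactly the '[' 0 '!=' 0 ']' / break pattern repeated throughout the trace.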