Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/logs/users-8-0.log WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 + create_infra users-32755 + local ns=users-32755 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-20860 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.5jOOz1Kkde ++ mktemp + local LAST_ERR=/tmp/tmp.dc1ZtJko3X + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.5jOOz1Kkde perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.dc1ZtJko3X + rm /tmp/tmp.5jOOz1Kkde /tmp/tmp.dc1ZtJko3X + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.LTRnO10DiL ++ mktemp + local LAST_ERR=/tmp/tmp.UH6Sy5bb4M + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LTRnO10DiL No resources found + cat /tmp/tmp.UH6Sy5bb4M + rm /tmp/tmp.LTRnO10DiL /tmp/tmp.UH6Sy5bb4M + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.c9eVNZ17xL ++ mktemp + local LAST_ERR=/tmp/tmp.fU9kjPYhGY + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.c9eVNZ17xL No resources found + cat /tmp/tmp.fU9kjPYhGY + rm /tmp/tmp.c9eVNZ17xL /tmp/tmp.fU9kjPYhGY + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ tail -n1 ++ sed s/NAMESPACE// ++ awk '-F ' '{print $2}' ++ helm list --all-namespaces --filter chaos-mesh + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' ++ grep validate-auth + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified 
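The repeated "error: resource(s) were provided, but no name was specified" messages above are expected noise: destroy_chaos_mesh builds each delete's argument list from a grep over existing resources, so when no chaos-mesh objects exist the name list is empty, kubectl exits non-zero, and the trailing ':' no-op (the '+ :' lines) swallows the failure. A minimal sketch of the pattern as it reads from this trace — the helm-uninstall branch was not exercised in this run, so its exact form is an assumption:

destroy_chaos_mesh() {
    local chaos_mesh_ns
    chaos_mesh_ns=$(helm list --all-namespaces --filter chaos-mesh \
        | tail -n1 | awk -F' ' '{print $2}' | sed 's/NAMESPACE//')
    if [ -n "$chaos_mesh_ns" ]; then
        helm uninstall chaos-mesh --namespace "$chaos_mesh_ns" || :   # assumed branch; empty in this run
    fi
    # empty $(...) lists below produce the benign "no name was specified" errors seen in the log
    timeout 30 kubectl delete MutatingWebhookConfiguration \
        $(kubectl get MutatingWebhookConfiguration | grep chaos-mesh | awk '{print $1}') || :
    timeout 30 kubectl delete ValidatingWebhookConfiguration \
        $(kubectl get ValidatingWebhookConfiguration | grep chaos-mesh | awk '{print $1}') || :
    # plus an identical ValidatingWebhookConfiguration pass grepping for validate-auth
    timeout 30 kubectl delete crd \
        $(kubectl get crd | grep chaos-mesh.org | awk '{print $1}') || :
    timeout 30 kubectl delete clusterrolebinding \
        $(kubectl get clusterrolebinding | grep chaos-mesh | awk '{print $1}') || :
    timeout 30 kubectl delete clusterrole \
        $(kubectl get clusterrole | grep chaos-mesh | awk '{print $1}') || :
}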
+ : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace + xargs kubectl delete ns ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.Stm2KLpwhn ++ mktemp + local LAST_OUT=/tmp/tmp.nK9ZmgQJdT + local LAST_ERR=/tmp/tmp.CInlHIMVUY + local exit_status=0 ++ mktemp ++ seq 0 2 + local LAST_ERR=/tmp/tmp.Z2CNE5XFae + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Stm2KLpwhn + cat /tmp/tmp.CInlHIMVUY + rm /tmp/tmp.Stm2KLpwhn /tmp/tmp.CInlHIMVUY + return 0 namespace "users-20860" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.nK9ZmgQJdT namespace "pxc-operator" deleted + cat /tmp/tmp.Z2CNE5XFae + rm /tmp/tmp.nK9ZmgQJdT /tmp/tmp.Z2CNE5XFae + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'namespace/pxc-operator - ' namespace/pxc-operator - + set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.WXbbLfIUTO ++ mktemp + local LAST_ERR=/tmp/tmp.7t88gC6TI2 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.WXbbLfIUTO namespace/pxc-operator created + cat /tmp/tmp.7t88gC6TI2 + rm /tmp/tmp.WXbbLfIUTO /tmp/tmp.7t88gC6TI2 + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.Jbvi3XOkRm +++ mktemp ++ local LAST_ERR=/tmp/tmp.gVGH3uif1i ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Jbvi3XOkRm ++ cat /tmp/tmp.gVGH3uif1i ++ rm /tmp/tmp.Jbvi3XOkRm /tmp/tmp.gVGH3uif1i ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1751-f9555a6f-10-cluster4 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.KNk8MMdbeC ++ mktemp + local LAST_ERR=/tmp/tmp.WZWg3Bj3YX + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1751-f9555a6f-10-cluster4 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.KNk8MMdbeC Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1751-f9555a6f-10-cluster4" modified. 
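Every kubectl invocation in this log runs through the harness's kubectl_bin wrapper: stdout and stderr are captured into mktemp files, the command is attempted up to three times (the 'seq 0 2' loop), and the captured output is only replayed with cat after the loop. That is why results often appear out of order, and why two wrappers running at once in a pipeline above (kubectl_bin get ns piped into xargs kubectl delete ns, alongside kubectl_bin delete namespace pxc-operator) interleave their 'local LAST_OUT=' lines. Reconstructed from the trace, roughly (a sketch, not the actual harness source):

kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0 i
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do                      # up to three attempts
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        [ "$exit_status" != 0 ] || break         # success: stop retrying
        sleep 0                                  # the "sleep 0" seen between failed attempts
    done
    cat "$LAST_OUT"                              # replay captured stdout...
    cat "$LAST_ERR"                              # ...then stderr, in that order, as the log shows
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}

When all three attempts fail the wrapper returns 1; callers that tolerate the failure follow it with '|| :', which is the bare '+ :' that appears right after each '+ return 1' in this log.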
+ cat /tmp/tmp.WZWg3Bj3YX + rm /tmp/tmp.KNk8MMdbeC /tmp/tmp.WZWg3Bj3YX + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.D7FhmsmMbI ++ mktemp + local LAST_ERR=/tmp/tmp.oAOcIR5HM6 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.D7FhmsmMbI customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.oAOcIR5HM6 + rm /tmp/tmp.D7FhmsmMbI /tmp/tmp.oAOcIR5HM6 + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/deploy/cw-rbac.yaml + kubectl_bin apply -f - + sed -e 's^namespace: .*^namespace: pxc-operator^' ++ mktemp + local LAST_OUT=/tmp/tmp.JxblNu5dkl ++ mktemp + local LAST_ERR=/tmp/tmp.UV6AFLYeWJ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.JxblNu5dkl clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.UV6AFLYeWJ + rm /tmp/tmp.JxblNu5dkl /tmp/tmp.UV6AFLYeWJ + return 0 + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-1751-f9555a6f^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/deploy/cw-operator.yaml + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.hwARR5q4b0 ++ mktemp + local LAST_ERR=/tmp/tmp.GpFr4gsuN4 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.hwARR5q4b0 deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.GpFr4gsuN4 + rm /tmp/tmp.hwARR5q4b0 /tmp/tmp.GpFr4gsuN4 + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.2JCsmtqRkW ++ mktemp + local LAST_ERR=/tmp/tmp.BiG2fJW22f + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.2JCsmtqRkW pod/percona-xtradb-cluster-operator-7888f9874f-8zpx8 condition met + cat /tmp/tmp.BiG2fJW22f + rm /tmp/tmp.2JCsmtqRkW /tmp/tmp.BiG2fJW22f + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.nd4lTPWdcT +++ mktemp ++ local LAST_ERR=/tmp/tmp.Fp0iE5bb1A ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nd4lTPWdcT ++ cat /tmp/tmp.Fp0iE5bb1A ++ rm /tmp/tmp.nd4lTPWdcT /tmp/tmp.Fp0iE5bb1A ++ return 0 + wait_pod percona-xtradb-cluster-operator-7888f9874f-8zpx8 480 pxc-operator + local pod=percona-xtradb-cluster-operator-7888f9874f-8zpx8 + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-7888f9874f-8zpx8 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-7888f9874f-8zpx8 condition met percona-xtradb-cluster-operator-7888f9874f-8zpx8.Ok + sleep 3 + create_namespace users-32755 + local namespace=users-32755 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ awk '{print $1}' ++ grep chaos-mesh.org + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + awk '{print$1}' + '[' -n '' ']' + desc 
'cleaned up old namespaces users-32755' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-32755 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-32755 + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' ++ mktemp + local LAST_OUT=/tmp/tmp.jdlwyokyA0 ++ mktemp + local LAST_ERR=/tmp/tmp.TTQ8xCCbF5 + local exit_status=0 ++ seq 0 2 ++ mktemp + local LAST_OUT=/tmp/tmp.yIEkTVnRXn ++ mktemp + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-32755 + local LAST_ERR=/tmp/tmp.9uCjoBfImn + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.yIEkTVnRXn + cat /tmp/tmp.9uCjoBfImn + rm /tmp/tmp.yIEkTVnRXn /tmp/tmp.9uCjoBfImn + return 0 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-32755 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-32755 Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.jdlwyokyA0 + cat /tmp/tmp.TTQ8xCCbF5 Error from server (NotFound): namespaces "users-32755" not found + rm /tmp/tmp.jdlwyokyA0 /tmp/tmp.TTQ8xCCbF5 + return 1 + : + wait_for_delete namespace/users-32755 + local res=namespace/users-32755 + echo -n 'namespace/users-32755 - ' namespace/users-32755 - + set +o xtrace Error from server (NotFound): namespaces "users-32755" not found + desc 'create namespace users-32755' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-32755 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-32755 ++ mktemp + local LAST_OUT=/tmp/tmp.hU7OiQMUyc ++ mktemp + local LAST_ERR=/tmp/tmp.TPqBn05WBX + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-32755 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.hU7OiQMUyc namespace/users-32755 created + cat /tmp/tmp.TPqBn05WBX + rm /tmp/tmp.hU7OiQMUyc /tmp/tmp.TPqBn05WBX + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.17n7EbXWng +++ mktemp ++ local LAST_ERR=/tmp/tmp.rZnM9iNU1A ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.17n7EbXWng ++ cat /tmp/tmp.rZnM9iNU1A ++ rm /tmp/tmp.17n7EbXWng /tmp/tmp.rZnM9iNU1A ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1751-f9555a6f-10-cluster4 --namespace=users-32755 ++ mktemp + local LAST_OUT=/tmp/tmp.oEWi5izfIG ++ mktemp + local LAST_ERR=/tmp/tmp.paP16MWDFn + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1751-f9555a6f-10-cluster4 --namespace=users-32755 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.oEWi5izfIG Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1751-f9555a6f-10-cluster4" modified. 
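deploy_operator, traced earlier in this log, applies the CRDs server-side, rewrites the namespace in cw-rbac.yaml with sed, and then pipes cw-operator.yaml through two sed substitutions and two yq edits before applying it: the PR image is pinned, failureThreshold is raised to 10, telemetry is disabled, and the log level is forced to DEBUG. Condensed into a single pipeline (stage order inferred from the interleaved trace):

cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/deploy/cw-operator.yaml \
    | sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-1751-f9555a6f^' \
    | sed -e 's^failureThreshold: .*^failureThreshold: 10^' \
    | yq eval '(select(.kind == "Deployment").spec.template.spec.containers[]
          | select(.name == "percona-xtradb-cluster-operator").env[]
          | select(.name == "DISABLE_TELEMETRY").value) = "true"' - \
    | yq eval '(select(.kind == "Deployment").spec.template.spec.containers[]
          | select(.name == "percona-xtradb-cluster-operator").env[]
          | select(.name == "LOG_LEVEL").value) = "DEBUG"' - \
    | kubectl_bin apply -f -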
+ cat /tmp/tmp.paP16MWDFn + rm /tmp/tmp.oEWi5izfIG /tmp/tmp.paP16MWDFn + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.692HC278I5 ++ mktemp + local LAST_ERR=/tmp/tmp.lka08u3wtT + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.692HC278I5 secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.lka08u3wtT + rm /tmp/tmp.692HC278I5 /tmp/tmp.lka08u3wtT + return 0 + desc 'create PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.AE2I2syVdN ++ mktemp + local LAST_ERR=/tmp/tmp.8ZiZiQvw3A + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.AE2I2syVdN secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.8ZiZiQvw3A + rm /tmp/tmp.AE2I2syVdN /tmp/tmp.8ZiZiQvw3A + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/conf/client.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/conf/client.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_OUT=/tmp/tmp.Q6Sffq7uGL + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 
's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1751-f9555a6f#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' ++ mktemp + local LAST_ERR=/tmp/tmp.RgJhS40S6a + local exit_status=0 ++ seq 0 2 + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-32755~ + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Q6Sffq7uGL deployment.apps/pxc-client created + cat /tmp/tmp.RgJhS40S6a + rm /tmp/tmp.Q6Sffq7uGL /tmp/tmp.RgJhS40S6a + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1751-f9555a6f#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + local LAST_OUT=/tmp/tmp.yKIsbQtr3D + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-32755~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' ++ mktemp + local LAST_ERR=/tmp/tmp.9LmtlehCdA + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.yKIsbQtr3D perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.9LmtlehCdA + rm /tmp/tmp.yKIsbQtr3D /tmp/tmp.9LmtlehCdA + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.qOj9BV4sZR ++++ mktemp +++ local LAST_ERR=/tmp/tmp.mD8qxhHtsu +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ 
'[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.qOj9BV4sZR +++ cat /tmp/tmp.mD8qxhHtsu +++ rm /tmp/tmp.qOj9BV4sZR /tmp/tmp.mD8qxhHtsu +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.J80cI69wE3 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.4tJkv691fI +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.J80cI69wE3 +++ cat /tmp/tmp.4tJkv691fI +++ rm /tmp/tmp.J80cI69wE3 /tmp/tmp.4tJkv691fI +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-32755 ++ mktemp + local LAST_OUT=/tmp/tmp.a4xgWdPONw ++ mktemp + local LAST_ERR=/tmp/tmp.UtAQrkVBgl + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-32755 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-32755 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-32755 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.a4xgWdPONw + cat /tmp/tmp.UtAQrkVBgl error: no matching resources found + rm /tmp/tmp.a4xgWdPONw /tmp/tmp.UtAQrkVBgl + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met some-name-proxysql-0.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met some-name-pxc-0.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local 
ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met some-name-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met some-name-pxc-2.Ok + sleep 10 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pvC6hXEAAb +++ mktemp ++ local LAST_ERR=/tmp/tmp.hmfpctuwTO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pvC6hXEAAb ++ cat /tmp/tmp.hmfpctuwTO ++ rm /tmp/tmp.pvC6hXEAAb /tmp/tmp.hmfpctuwTO ++ return 0 + client_pod=pxc-client-6644d8898f-zvwkh + wait_pod pxc-client-6644d8898f-zvwkh + local pod=pxc-client-6644d8898f-zvwkh + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zvwkh ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zvwkh condition met pxc-client-6644d8898f-zvwkh.Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.c5daawwkwf +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZFYkQsPtMK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.c5daawwkwf ++ cat /tmp/tmp.ZFYkQsPtMK ++ rm /tmp/tmp.c5daawwkwf /tmp/tmp.ZFYkQsPtMK ++ return 0 + client_pod=pxc-client-6644d8898f-zvwkh + wait_pod pxc-client-6644d8898f-zvwkh + local pod=pxc-client-6644d8898f-zvwkh + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zvwkh ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zvwkh condition met pxc-client-6644d8898f-zvwkh.Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h 
some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SdIpLoAT3w +++ mktemp ++ local LAST_ERR=/tmp/tmp.BO61x5Y4Mb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SdIpLoAT3w ++ cat /tmp/tmp.BO61x5Y4Mb ++ rm /tmp/tmp.SdIpLoAT3w /tmp/tmp.BO61x5Y4Mb ++ return 0 + client_pod=pxc-client-6644d8898f-zvwkh + wait_pod pxc-client-6644d8898f-zvwkh + local pod=pxc-client-6644d8898f-zvwkh + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zvwkh ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zvwkh condition met pxc-client-6644d8898f-zvwkh.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.6lxaPdqhMt/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-1.sql /tmp/tmp.6lxaPdqhMt/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fofQtd8qnS +++ mktemp ++ local LAST_ERR=/tmp/tmp.b6xIN32QBn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fofQtd8qnS ++ cat /tmp/tmp.b6xIN32QBn ++ rm /tmp/tmp.fofQtd8qnS /tmp/tmp.b6xIN32QBn ++ return 0 + client_pod=pxc-client-6644d8898f-zvwkh + wait_pod pxc-client-6644d8898f-zvwkh + local pod=pxc-client-6644d8898f-zvwkh + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zvwkh ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zvwkh condition met pxc-client-6644d8898f-zvwkh.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.6lxaPdqhMt/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-1.sql /tmp/tmp.6lxaPdqhMt/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aL9BQYcmZM +++ mktemp ++ local LAST_ERR=/tmp/tmp.iTDMfh901r ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aL9BQYcmZM ++ cat /tmp/tmp.iTDMfh901r ++ rm /tmp/tmp.aL9BQYcmZM /tmp/tmp.iTDMfh901r ++ return 0 + client_pod=pxc-client-6644d8898f-zvwkh + wait_pod pxc-client-6644d8898f-zvwkh + local pod=pxc-client-6644d8898f-zvwkh + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zvwkh ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zvwkh condition met pxc-client-6644d8898f-zvwkh.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.6lxaPdqhMt/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-1.sql /tmp/tmp.6lxaPdqhMt/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.g08je2X99n +++ mktemp ++ local LAST_ERR=/tmp/tmp.tSKsNYfkJM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.g08je2X99n ++ cat /tmp/tmp.tSKsNYfkJM Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.g08je2X99n /tmp/tmp.tSKsNYfkJM ++ return 0 + '[' '' ']' + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Wem3c8kUTI ++ mktemp + local LAST_ERR=/tmp/tmp.I0r8Ivg0rh + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Wem3c8kUTI secret/my-cluster-secrets patched + cat /tmp/tmp.I0r8Ivg0rh + rm /tmp/tmp.Wem3c8kUTI /tmp/tmp.I0r8Ivg0rh + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.J1uzZloSwR +++ mktemp ++ local LAST_ERR=/tmp/tmp.gzZp3f6brO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.J1uzZloSwR ++ cat /tmp/tmp.gzZp3f6brO ++ rm /tmp/tmp.J1uzZloSwR /tmp/tmp.gzZp3f6brO ++ return 0 + client_pod=pxc-client-6644d8898f-zvwkh + wait_pod pxc-client-6644d8898f-zvwkh + local pod=pxc-client-6644d8898f-zvwkh + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zvwkh ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zvwkh condition met pxc-client-6644d8898f-zvwkh.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.6lxaPdqhMt/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.6lxaPdqhMt/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.HjbutodsDB ++ mktemp + local LAST_ERR=/tmp/tmp.o3NwtBqVjX + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.HjbutodsDB perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.o3NwtBqVjX + rm /tmp/tmp.HjbutodsDB /tmp/tmp.o3NwtBqVjX + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.33kly3dMuF +++ mktemp ++ local LAST_ERR=/tmp/tmp.d6UprwskWe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.33kly3dMuF ++ cat /tmp/tmp.d6UprwskWe ++ rm /tmp/tmp.33kly3dMuF /tmp/tmp.d6UprwskWe ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fAslX8yzMV +++ mktemp ++ local LAST_ERR=/tmp/tmp.FQSwwzSX3U ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fAslX8yzMV ++ cat /tmp/tmp.FQSwwzSX3U ++ rm /tmp/tmp.fAslX8yzMV /tmp/tmp.FQSwwzSX3U ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xepgT4lY6O +++ mktemp ++ local LAST_ERR=/tmp/tmp.GOCfWUQ0ID ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xepgT4lY6O ++ cat /tmp/tmp.GOCfWUQ0ID ++ rm /tmp/tmp.xepgT4lY6O /tmp/tmp.GOCfWUQ0ID ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.SjtPc32Th9 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.HcjwO4XOxH +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.SjtPc32Th9 +++++ cat /tmp/tmp.HcjwO4XOxH +++++ rm /tmp/tmp.SjtPc32Th9 
/tmp/tmp.HcjwO4XOxH +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.5KEtPSWzVu ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.huJXGhRpUS +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.5KEtPSWzVu +++++ cat /tmp/tmp.huJXGhRpUS +++++ rm /tmp/tmp.5KEtPSWzVu /tmp/tmp.huJXGhRpUS +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.B7sifBX89e +++ mktemp ++ local LAST_ERR=/tmp/tmp.myTmw0Zjwf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.B7sifBX89e ++ cat /tmp/tmp.myTmw0Zjwf ++ rm /tmp/tmp.B7sifBX89e /tmp/tmp.myTmw0Zjwf ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.BZFPGzc9Jc ++ mktemp + local LAST_ERR=/tmp/tmp.myNWav1lLj + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BZFPGzc9Jc secret/my-cluster-secrets patched + cat /tmp/tmp.myNWav1lLj + rm /tmp/tmp.BZFPGzc9Jc /tmp/tmp.myNWav1lLj + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IuAQ2dygWj +++ mktemp ++ local LAST_ERR=/tmp/tmp.Z8ZQUPIfXz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IuAQ2dygWj ++ cat /tmp/tmp.Z8ZQUPIfXz ++ rm /tmp/tmp.IuAQ2dygWj /tmp/tmp.Z8ZQUPIfXz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0dPDacPG39 +++ mktemp ++ local LAST_ERR=/tmp/tmp.jLIzKtVBvX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0dPDacPG39 ++ cat /tmp/tmp.jLIzKtVBvX ++ rm /tmp/tmp.0dPDacPG39 /tmp/tmp.jLIzKtVBvX ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UrvZcQVztF +++ mktemp 
++ local LAST_ERR=/tmp/tmp.G0MEZPpekg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UrvZcQVztF ++ cat /tmp/tmp.G0MEZPpekg ++ rm /tmp/tmp.UrvZcQVztF /tmp/tmp.G0MEZPpekg ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.TmhUcwNbHq ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.vVf4DanbtX +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.TmhUcwNbHq +++++ cat /tmp/tmp.vVf4DanbtX +++++ rm /tmp/tmp.TmhUcwNbHq /tmp/tmp.vVf4DanbtX +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.30TrFWNbtN ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.jgL7TSlxLB +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.30TrFWNbtN +++++ cat /tmp/tmp.jgL7TSlxLB +++++ rm /tmp/tmp.30TrFWNbtN /tmp/tmp.jgL7TSlxLB +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ArFrUPvPbh +++ mktemp ++ local LAST_ERR=/tmp/tmp.aLpS2JIZ7P ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ArFrUPvPbh ++ cat /tmp/tmp.aLpS2JIZ7P ++ rm /tmp/tmp.ArFrUPvPbh /tmp/tmp.aLpS2JIZ7P ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.6lxaPdqhMt/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-2.sql /tmp/tmp.6lxaPdqhMt/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.6lxaPdqhMt/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-2.sql /tmp/tmp.6lxaPdqhMt/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.6lxaPdqhMt/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-2.sql /tmp/tmp.6lxaPdqhMt/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.RLi7O4fOz6 ++ mktemp + local LAST_ERR=/tmp/tmp.M9gvQy0k8b + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.RLi7O4fOz6 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.M9gvQy0k8b + rm /tmp/tmp.RLi7O4fOz6 /tmp/tmp.M9gvQy0k8b + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.SSH25z2hIJ ++ mktemp + local LAST_ERR=/tmp/tmp.BrhzxKXbdn + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.SSH25z2hIJ secret/my-cluster-secrets patched + cat /tmp/tmp.BrhzxKXbdn + rm /tmp/tmp.SSH25z2hIJ /tmp/tmp.BrhzxKXbdn + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HcUFbGrrfU +++ mktemp ++ local LAST_ERR=/tmp/tmp.vCesT6Q5wz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HcUFbGrrfU ++ cat /tmp/tmp.vCesT6Q5wz ++ rm /tmp/tmp.HcUFbGrrfU /tmp/tmp.vCesT6Q5wz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.77zQj0mZTm +++ mktemp ++ local LAST_ERR=/tmp/tmp.C0C9HQWv3i ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.77zQj0mZTm ++ cat /tmp/tmp.C0C9HQWv3i ++ rm /tmp/tmp.77zQj0mZTm /tmp/tmp.C0C9HQWv3i ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.co6496I1sY +++ mktemp ++ local LAST_ERR=/tmp/tmp.FTeOOHye5y ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 
'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.co6496I1sY ++ cat /tmp/tmp.FTeOOHye5y ++ rm /tmp/tmp.co6496I1sY /tmp/tmp.FTeOOHye5y ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JXTD6dJ919 +++ mktemp ++ local LAST_ERR=/tmp/tmp.TXkzISD9xe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JXTD6dJ919 ++ cat /tmp/tmp.TXkzISD9xe ++ rm /tmp/tmp.JXTD6dJ919 /tmp/tmp.TXkzISD9xe ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.avKq6kIfrV +++ mktemp ++ local LAST_ERR=/tmp/tmp.X2rysMuYhQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.avKq6kIfrV ++ cat /tmp/tmp.X2rysMuYhQ ++ rm /tmp/tmp.avKq6kIfrV /tmp/tmp.X2rysMuYhQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.12EIjZO5mG +++ mktemp ++ local LAST_ERR=/tmp/tmp.wDjaFjeUv5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.12EIjZO5mG ++ cat /tmp/tmp.wDjaFjeUv5 ++ rm /tmp/tmp.12EIjZO5mG /tmp/tmp.wDjaFjeUv5 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wSy8fq5bLE +++ mktemp ++ local LAST_ERR=/tmp/tmp.e9GcNBh4jn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wSy8fq5bLE ++ cat /tmp/tmp.e9GcNBh4jn ++ rm /tmp/tmp.wSy8fq5bLE /tmp/tmp.e9GcNBh4jn ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.prmQzpmkot +++ mktemp ++ local LAST_ERR=/tmp/tmp.H9IpBJWPuz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.prmQzpmkot ++ cat /tmp/tmp.H9IpBJWPuz ++ rm /tmp/tmp.prmQzpmkot /tmp/tmp.H9IpBJWPuz ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.U00nwS18OR +++ mktemp ++ local LAST_ERR=/tmp/tmp.vDU4iXXfSk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e 
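The long stretch above — "waiting for cluster readyness" repeated every 20 seconds — is wait_cluster_consistency polling the custom resource's status until .status.state reports ready and the pxc/proxysql ready counts match the requested sizes (here 3 and 2 after the proxysql scale-down). In outline, as a sketch; the give-up branch is never reached in this run, so its behavior is assumed:

wait_cluster_consistency() {
    local cluster_name=$1 cluster_size=$2 proxy_size=$3
    local i=0 max=36
    sleep 7
    while [[ $(kubectl_bin get pxc "$cluster_name" -o 'jsonpath={.status.state}') != "ready" ]]; do
        echo 'waiting for cluster readyness'     # spelling as emitted by the harness
        sleep 20
        if [[ $i -ge $max ]]; then
            return 1                             # give-up path; assumed, not exercised in this log
        fi
        let i+=1
    done
    [[ $(kubectl_bin get pxc "$cluster_name" -o 'jsonpath={.status.pxc.ready}') == "$cluster_size" ]]
    # which proxy ready-count is checked depends on get_proxy_engine (haproxy vs proxysql);
    # this run uses proxysql
    [[ $(kubectl_bin get pxc "$cluster_name" -o 'jsonpath={.status.proxysql.ready}') == "$proxy_size" ]]
}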
++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.U00nwS18OR ++ cat /tmp/tmp.vDU4iXXfSk ++ rm /tmp/tmp.U00nwS18OR /tmp/tmp.vDU4iXXfSk ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Ck4SSXvL4q ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.1mb8YdpcIo +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Ck4SSXvL4q +++++ cat /tmp/tmp.1mb8YdpcIo +++++ rm /tmp/tmp.Ck4SSXvL4q /tmp/tmp.1mb8YdpcIo +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.kQHTqYc6Oi ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.cDB9EK1VyE +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.kQHTqYc6Oi +++++ cat /tmp/tmp.cDB9EK1VyE +++++ rm /tmp/tmp.kQHTqYc6Oi /tmp/tmp.cDB9EK1VyE +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YKaEVrauXK +++ mktemp ++ local LAST_ERR=/tmp/tmp.JAnjSBq1yk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YKaEVrauXK ++ cat /tmp/tmp.JAnjSBq1yk ++ rm /tmp/tmp.YKaEVrauXK /tmp/tmp.JAnjSBq1yk ++ return 0 + [[ 2 == \2 ]] + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-3-80.sql ']' + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' 
-s /tmp/tmp.6lxaPdqhMt/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-3.sql /tmp/tmp.6lxaPdqhMt/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.X9YlSePOBB ++ mktemp + local LAST_ERR=/tmp/tmp.KItpMLYG2M + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.X9YlSePOBB secret/my-cluster-secrets patched + cat /tmp/tmp.KItpMLYG2M + rm /tmp/tmp.X9YlSePOBB /tmp/tmp.KItpMLYG2M + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.gyaxsKNJQL +++ mktemp ++ local LAST_ERR=/tmp/tmp.YODp1oRIWh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gyaxsKNJQL ++ cat /tmp/tmp.YODp1oRIWh ++ rm /tmp/tmp.gyaxsKNJQL /tmp/tmp.YODp1oRIWh ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + retry=0 + is_password_updated monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep additional_password + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xIbMVzextm +++ mktemp ++ local LAST_ERR=/tmp/tmp.A3fzrA4Yn8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xIbMVzextm ++ cat /tmp/tmp.A3fzrA4Yn8 ++ rm /tmp/tmp.xIbMVzextm /tmp/tmp.A3fzrA4Yn8 ++ return 0 + client_pod=pxc-client-6644d8898f-zvwkh + wait_pod pxc-client-6644d8898f-zvwkh + local pod=pxc-client-6644d8898f-zvwkh + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zvwkh ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zvwkh condition met pxc-client-6644d8898f-zvwkh.Ok + set +o xtrace {"additional_password": "*C1F414D9BAF378B656A849B31F9F8AF3125F558B"} + retry=0 + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc 
-uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zZybZudx8Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.H1ah6Vf1Gt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zZybZudx8Y ++ cat /tmp/tmp.H1ah6Vf1Gt ++ rm /tmp/tmp.zZybZudx8Y /tmp/tmp.H1ah6Vf1Gt ++ return 0 + client_pod=pxc-client-6644d8898f-zvwkh + wait_pod pxc-client-6644d8898f-zvwkh + local pod=pxc-client-6644d8898f-zvwkh + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zvwkh ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zvwkh condition met pxc-client-6644d8898f-zvwkh.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 1 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.auCjHX4T1n +++ mktemp ++ local LAST_ERR=/tmp/tmp.x81xPGp7D3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.auCjHX4T1n ++ cat /tmp/tmp.x81xPGp7D3 ++ rm /tmp/tmp.auCjHX4T1n /tmp/tmp.x81xPGp7D3 ++ return 0 + client_pod=pxc-client-6644d8898f-zvwkh + wait_pod pxc-client-6644d8898f-zvwkh + local pod=pxc-client-6644d8898f-zvwkh + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zvwkh ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zvwkh condition met pxc-client-6644d8898f-zvwkh.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 2 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.F3Rofpl1yN +++ mktemp ++ local LAST_ERR=/tmp/tmp.2ES7PF99lG ++ local 
exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.F3Rofpl1yN ++ cat /tmp/tmp.2ES7PF99lG ++ rm /tmp/tmp.F3Rofpl1yN /tmp/tmp.2ES7PF99lG ++ return 0 + client_pod=pxc-client-6644d8898f-zvwkh + wait_pod pxc-client-6644d8898f-zvwkh + local pod=pxc-client-6644d8898f-zvwkh + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zvwkh ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zvwkh condition met pxc-client-6644d8898f-zvwkh.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 3 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mhgY7rzobu +++ mktemp ++ local LAST_ERR=/tmp/tmp.0xiIiV2mrU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mhgY7rzobu ++ cat /tmp/tmp.0xiIiV2mrU ++ rm /tmp/tmp.mhgY7rzobu /tmp/tmp.0xiIiV2mrU ++ return 0 + client_pod=pxc-client-6644d8898f-zvwkh + wait_pod pxc-client-6644d8898f-zvwkh + local pod=pxc-client-6644d8898f-zvwkh + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zvwkh ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zvwkh condition met pxc-client-6644d8898f-zvwkh.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 4 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7EUW92ePHe +++ mktemp ++ local LAST_ERR=/tmp/tmp.L1clgDkRGM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7EUW92ePHe ++ cat /tmp/tmp.L1clgDkRGM ++ rm /tmp/tmp.7EUW92ePHe /tmp/tmp.L1clgDkRGM ++ return 0 + client_pod=pxc-client-6644d8898f-zvwkh + wait_pod pxc-client-6644d8898f-zvwkh + local pod=pxc-client-6644d8898f-zvwkh + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zvwkh ++ /usr/bin/sed -E 
's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zvwkh condition met pxc-client-6644d8898f-zvwkh.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 5 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Z6nYE7vhCO +++ mktemp ++ local LAST_ERR=/tmp/tmp.f88qNyFSym ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Z6nYE7vhCO ++ cat /tmp/tmp.f88qNyFSym ++ rm /tmp/tmp.Z6nYE7vhCO /tmp/tmp.f88qNyFSym ++ return 0 + client_pod=pxc-client-6644d8898f-zvwkh + wait_pod pxc-client-6644d8898f-zvwkh + local pod=pxc-client-6644d8898f-zvwkh + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zvwkh ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zvwkh condition met pxc-client-6644d8898f-zvwkh.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 6 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ETqdQDXJtF +++ mktemp ++ local LAST_ERR=/tmp/tmp.CnDfaIC3zc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ETqdQDXJtF ++ cat /tmp/tmp.CnDfaIC3zc ++ rm /tmp/tmp.ETqdQDXJtF /tmp/tmp.CnDfaIC3zc ++ return 0 + client_pod=pxc-client-6644d8898f-zvwkh + wait_pod pxc-client-6644d8898f-zvwkh + local pod=pxc-client-6644d8898f-zvwkh + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zvwkh ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zvwkh condition met pxc-client-6644d8898f-zvwkh.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 7 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE 
user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sWgNkjO8qL +++ mktemp ++ local LAST_ERR=/tmp/tmp.9nRSJaZWjE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sWgNkjO8qL ++ cat /tmp/tmp.9nRSJaZWjE ++ rm /tmp/tmp.sWgNkjO8qL /tmp/tmp.9nRSJaZWjE ++ return 0 + client_pod=pxc-client-6644d8898f-zvwkh + wait_pod pxc-client-6644d8898f-zvwkh + local pod=pxc-client-6644d8898f-zvwkh + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zvwkh ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zvwkh condition met pxc-client-6644d8898f-zvwkh.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 8 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mdPpztAqoj +++ mktemp ++ local LAST_ERR=/tmp/tmp.3mJslc6d1A ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mdPpztAqoj ++ cat /tmp/tmp.3mJslc6d1A ++ rm /tmp/tmp.mdPpztAqoj /tmp/tmp.3mJslc6d1A ++ return 0 + client_pod=pxc-client-6644d8898f-zvwkh + wait_pod pxc-client-6644d8898f-zvwkh + local pod=pxc-client-6644d8898f-zvwkh + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zvwkh ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zvwkh condition met pxc-client-6644d8898f-zvwkh.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 9 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.790uz8v4EA +++ mktemp ++ local LAST_ERR=/tmp/tmp.SUOOsX7MNE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e
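# ---------------------------------------------------------------------------
# [editor's note] This retry loop waits for the rotated monitor password to
# finish propagating. MySQL 8.0 dual passwords make that state observable:
# while both the new and the retained old password are valid,
# mysql.user.User_attributes carries an "additional_password" hash (visible
# earlier in this log), and once the old password is discarded the column
# reads NULL again. A sketch of the check, reconstructed from the trace;
# run_mysql is the suite's helper for running a statement via the client pod.
is_old_password_discarded() {
    local username=$1 uri=$2
    run_mysql "SELECT User_attributes FROM mysql.user WHERE user='${username}'" "$uri" \
        | grep -q NULL    # NULL => no secondary password left
}
# usage (the suite polls this once per second, up to 240 tries):
#   until is_old_password_discarded monitor "-h some-name-pxc -uroot -p'...'"; do sleep 1; done
# ---------------------------------------------------------------------------
++ kubectl get pods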
--selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.790uz8v4EA ++ cat /tmp/tmp.SUOOsX7MNE ++ rm /tmp/tmp.790uz8v4EA /tmp/tmp.SUOOsX7MNE ++ return 0 + client_pod=pxc-client-6644d8898f-zvwkh + wait_pod pxc-client-6644d8898f-zvwkh + local pod=pxc-client-6644d8898f-zvwkh + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zvwkh ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zvwkh condition met pxc-client-6644d8898f-zvwkh.Ok + set +o xtrace NULL + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bYEOTKcCtk +++ mktemp ++ local LAST_ERR=/tmp/tmp.12CZrJa9jv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bYEOTKcCtk ++ cat /tmp/tmp.12CZrJa9jv ++ rm /tmp/tmp.bYEOTKcCtk /tmp/tmp.12CZrJa9jv ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IarSOCie6Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.eLD4KiJPtj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IarSOCie6Y ++ cat /tmp/tmp.eLD4KiJPtj ++ rm /tmp/tmp.IarSOCie6Y /tmp/tmp.eLD4KiJPtj ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.WVTQTvLxu2 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.C5Sar3QoWG +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.WVTQTvLxu2 +++++ cat /tmp/tmp.C5Sar3QoWG +++++ rm /tmp/tmp.WVTQTvLxu2 /tmp/tmp.C5Sar3QoWG +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.cNnW6DD00I ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.TFSpzrW1Ac +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.cNnW6DD00I +++++ cat /tmp/tmp.TFSpzrW1Ac +++++ rm /tmp/tmp.cNnW6DD00I /tmp/tmp.TFSpzrW1Ac +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.W2fZAM4zRc +++ mktemp ++ local LAST_ERR=/tmp/tmp.o2RlbtPg19 ++ local exit_status=0 +++ seq 
0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.W2fZAM4zRc ++ cat /tmp/tmp.o2RlbtPg19 ++ rm /tmp/tmp.W2fZAM4zRc /tmp/tmp.o2RlbtPg19 ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.J28nJAQ7B5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.NWuTn4X3cb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.J28nJAQ7B5 ++ cat /tmp/tmp.NWuTn4X3cb ++ rm /tmp/tmp.J28nJAQ7B5 /tmp/tmp.NWuTn4X3cb ++ return 0 + client_pod=pxc-client-6644d8898f-zvwkh + wait_pod pxc-client-6644d8898f-zvwkh + local pod=pxc-client-6644d8898f-zvwkh + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zvwkh ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zvwkh condition met pxc-client-6644d8898f-zvwkh.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.6lxaPdqhMt/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.6lxaPdqhMt/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.4EVHIOG7us ++ mktemp + local LAST_ERR=/tmp/tmp.TBpo7BZye7 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.4EVHIOG7us secret/my-cluster-secrets patched + cat /tmp/tmp.TBpo7BZye7 + rm /tmp/tmp.4EVHIOG7us /tmp/tmp.TBpo7BZye7 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.B30NKj9zFp +++ mktemp ++ local LAST_ERR=/tmp/tmp.EVt5kRL26f ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.B30NKj9zFp ++ cat /tmp/tmp.EVt5kRL26f ++ rm /tmp/tmp.B30NKj9zFp /tmp/tmp.EVt5kRL26f ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4bMTBqjCl0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.QSMiEmo4sa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4bMTBqjCl0 ++ cat /tmp/tmp.QSMiEmo4sa ++ rm /tmp/tmp.4bMTBqjCl0 /tmp/tmp.QSMiEmo4sa ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YspJKW87LT +++ mktemp ++ local LAST_ERR=/tmp/tmp.U6SGmejoJJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YspJKW87LT ++ cat /tmp/tmp.U6SGmejoJJ ++ rm /tmp/tmp.YspJKW87LT /tmp/tmp.U6SGmejoJJ ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VQ6pvzQaJD +++ mktemp ++ local LAST_ERR=/tmp/tmp.V966uAbk99 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ 
break ++ cat /tmp/tmp.VQ6pvzQaJD ++ cat /tmp/tmp.V966uAbk99 ++ rm /tmp/tmp.VQ6pvzQaJD /tmp/tmp.V966uAbk99 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.YlugbfmvsP ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.vxG8gYAWfU +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.YlugbfmvsP +++++ cat /tmp/tmp.vxG8gYAWfU +++++ rm /tmp/tmp.YlugbfmvsP /tmp/tmp.vxG8gYAWfU +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.FFD1htrWhX ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.F7y1QJjpjV +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.FFD1htrWhX +++++ cat /tmp/tmp.F7y1QJjpjV +++++ rm /tmp/tmp.FFD1htrWhX /tmp/tmp.F7y1QJjpjV +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AhOqe6eAOl +++ mktemp ++ local LAST_ERR=/tmp/tmp.5MnTdwTAp1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AhOqe6eAOl ++ cat /tmp/tmp.5MnTdwTAp1 ++ rm /tmp/tmp.AhOqe6eAOl /tmp/tmp.5MnTdwTAp1 ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kcsE4rBWiF +++ mktemp ++ local LAST_ERR=/tmp/tmp.8yb5vOjwrm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kcsE4rBWiF ++ cat /tmp/tmp.8yb5vOjwrm ++ rm /tmp/tmp.kcsE4rBWiF /tmp/tmp.8yb5vOjwrm ++ return 0 + client_pod=pxc-client-6644d8898f-zvwkh + wait_pod pxc-client-6644d8898f-zvwkh + local pod=pxc-client-6644d8898f-zvwkh + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zvwkh ++ /usr/bin/sed -E 
's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zvwkh condition met pxc-client-6644d8898f-zvwkh.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.6lxaPdqhMt/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.6lxaPdqhMt/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.4lFjtuYGnv ++ mktemp + local LAST_ERR=/tmp/tmp.IjRoEyyF6I + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.4lFjtuYGnv perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.IjRoEyyF6I + rm /tmp/tmp.4lFjtuYGnv /tmp/tmp.IjRoEyyF6I + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RcaGn7qKSt +++ mktemp ++ local LAST_ERR=/tmp/tmp.Gn5mu9VqRb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RcaGn7qKSt ++ cat /tmp/tmp.Gn5mu9VqRb ++ rm /tmp/tmp.RcaGn7qKSt /tmp/tmp.Gn5mu9VqRb ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uxCr1YBkr4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.3hYAENKecq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uxCr1YBkr4 ++ cat /tmp/tmp.3hYAENKecq ++ rm /tmp/tmp.uxCr1YBkr4 /tmp/tmp.3hYAENKecq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Jc0i1USuZN +++ mktemp ++ local LAST_ERR=/tmp/tmp.MUAldnZNlX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Jc0i1USuZN ++ cat /tmp/tmp.MUAldnZNlX ++ rm /tmp/tmp.Jc0i1USuZN /tmp/tmp.MUAldnZNlX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CrVTidSadr +++ mktemp ++ local 
LAST_ERR=/tmp/tmp.DOM1wpOaLk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CrVTidSadr ++ cat /tmp/tmp.DOM1wpOaLk ++ rm /tmp/tmp.CrVTidSadr /tmp/tmp.DOM1wpOaLk ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wMNlWFcLsj +++ mktemp ++ local LAST_ERR=/tmp/tmp.m4nQKjr4a2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wMNlWFcLsj ++ cat /tmp/tmp.m4nQKjr4a2 ++ rm /tmp/tmp.wMNlWFcLsj /tmp/tmp.m4nQKjr4a2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tvzo056yit +++ mktemp ++ local LAST_ERR=/tmp/tmp.P6QHMbacKV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tvzo056yit ++ cat /tmp/tmp.P6QHMbacKV ++ rm /tmp/tmp.tvzo056yit /tmp/tmp.P6QHMbacKV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Nd4dqJJPNd +++ mktemp ++ local LAST_ERR=/tmp/tmp.nRdcMCPZlS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Nd4dqJJPNd ++ cat /tmp/tmp.nRdcMCPZlS ++ rm /tmp/tmp.Nd4dqJJPNd /tmp/tmp.nRdcMCPZlS ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hKxMYAziyq +++ mktemp ++ local LAST_ERR=/tmp/tmp.Gh3uWWv9oI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hKxMYAziyq ++ cat /tmp/tmp.Gh3uWWv9oI ++ rm /tmp/tmp.hKxMYAziyq /tmp/tmp.Gh3uWWv9oI ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.x2HB0zPMjT ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.uHafyksQ8J +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.x2HB0zPMjT +++++ cat /tmp/tmp.uHafyksQ8J +++++ rm /tmp/tmp.x2HB0zPMjT /tmp/tmp.uHafyksQ8J +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.FaoZGM7qKD ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.EAWy6zTfoX +++++ local exit_status=0 ++++++ seq 0 2 +++++ for 
i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.FaoZGM7qKD +++++ cat /tmp/tmp.EAWy6zTfoX +++++ rm /tmp/tmp.FaoZGM7qKD /tmp/tmp.EAWy6zTfoX +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GHn49DX2M3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.zKmgHMBLbr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GHn49DX2M3 ++ cat /tmp/tmp.zKmgHMBLbr ++ rm /tmp/tmp.GHn49DX2M3 /tmp/tmp.zKmgHMBLbr ++ return 0 + [[ 2 == \2 ]] + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.IRkagBEmX5 ++ mktemp + local LAST_ERR=/tmp/tmp.amxzKgLT9O + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.IRkagBEmX5 secret/my-cluster-secrets-2 patched + cat /tmp/tmp.amxzKgLT9O + rm /tmp/tmp.IRkagBEmX5 /tmp/tmp.amxzKgLT9O + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.V2Zo7eK1Ky +++ mktemp ++ local LAST_ERR=/tmp/tmp.PcgrbihlFX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.V2Zo7eK1Ky ++ cat /tmp/tmp.PcgrbihlFX ++ rm /tmp/tmp.V2Zo7eK1Ky /tmp/tmp.PcgrbihlFX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6CMiYskLjg +++ mktemp ++ local LAST_ERR=/tmp/tmp.m6NhAm6xsf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6CMiYskLjg ++ cat /tmp/tmp.m6NhAm6xsf ++ rm /tmp/tmp.6CMiYskLjg /tmp/tmp.m6NhAm6xsf ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local 
LAST_OUT=/tmp/tmp.cq7LjAdViY +++ mktemp ++ local LAST_ERR=/tmp/tmp.S90aM0FK8s ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cq7LjAdViY ++ cat /tmp/tmp.S90aM0FK8s ++ rm /tmp/tmp.cq7LjAdViY /tmp/tmp.S90aM0FK8s ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.xllUrEdhH1 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.DPq7bp0TmS +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.xllUrEdhH1 +++++ cat /tmp/tmp.DPq7bp0TmS +++++ rm /tmp/tmp.xllUrEdhH1 /tmp/tmp.DPq7bp0TmS +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.3YCZt2t4Jm ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.zeTBmg2mc1 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.3YCZt2t4Jm +++++ cat /tmp/tmp.zeTBmg2mc1 +++++ rm /tmp/tmp.3YCZt2t4Jm /tmp/tmp.zeTBmg2mc1 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CyEhKP6MGD +++ mktemp ++ local LAST_ERR=/tmp/tmp.0C1sWKwhvR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CyEhKP6MGD ++ cat /tmp/tmp.0C1sWKwhvR ++ rm /tmp/tmp.CyEhKP6MGD /tmp/tmp.0C1sWKwhvR ++ return 0 + [[ 2 == \2 ]] + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kqDYmr0nBc +++ mktemp ++ local LAST_ERR=/tmp/tmp.VSwxCKQOSJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kqDYmr0nBc ++ cat /tmp/tmp.VSwxCKQOSJ ++ rm 
/tmp/tmp.kqDYmr0nBc /tmp/tmp.VSwxCKQOSJ ++ return 0 + client_pod=pxc-client-6644d8898f-zvwkh + wait_pod pxc-client-6644d8898f-zvwkh + local pod=pxc-client-6644d8898f-zvwkh + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zvwkh ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zvwkh condition met pxc-client-6644d8898f-zvwkh.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.6lxaPdqhMt/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.6lxaPdqhMt/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.XmcOXWOdMq +++ mktemp ++ local LAST_ERR=/tmp/tmp.wxe6WvVxve ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XmcOXWOdMq ++ cat /tmp/tmp.wxe6WvVxve ++ rm /tmp/tmp.XmcOXWOdMq /tmp/tmp.wxe6WvVxve ++ return 0 + newpass='mm.jh@a-Vs+Juc}]qR' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''mm.jh@a-Vs+Juc}]qR'\'';' '-h some-name-pxc -uroot -p'\''mm.jh@a-Vs+Juc}]qR'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''mm.jh@a-Vs+Juc}]qR'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''mm.jh@a-Vs+Juc}]qR'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8Yt7gTEi5g +++ mktemp ++ local LAST_ERR=/tmp/tmp.cie72yJVjR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8Yt7gTEi5g ++ cat /tmp/tmp.cie72yJVjR ++ rm /tmp/tmp.8Yt7gTEi5g /tmp/tmp.cie72yJVjR ++ return 0 + client_pod=pxc-client-6644d8898f-zvwkh + wait_pod pxc-client-6644d8898f-zvwkh + local pod=pxc-client-6644d8898f-zvwkh + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zvwkh ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zvwkh condition met pxc-client-6644d8898f-zvwkh.Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''mm.jh@a-Vs+Juc}]qR'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''mm.jh@a-Vs+Juc}]qR'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''mm.jh@a-Vs+Juc}]qR'\''' + local 'command=SHOW TABLES;' + local 'uri=-h 
some-name-proxysql -utestsync -p'\''mm.jh@a-Vs+Juc}]qR'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4IbtljeSxS +++ mktemp ++ local LAST_ERR=/tmp/tmp.bmp6YapxM4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4IbtljeSxS ++ cat /tmp/tmp.bmp6YapxM4 ++ rm /tmp/tmp.4IbtljeSxS /tmp/tmp.bmp6YapxM4 ++ return 0 + client_pod=pxc-client-6644d8898f-zvwkh + wait_pod pxc-client-6644d8898f-zvwkh + local pod=pxc-client-6644d8898f-zvwkh + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zvwkh ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zvwkh condition met pxc-client-6644d8898f-zvwkh.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.6lxaPdqhMt/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.6lxaPdqhMt/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.SLFQQjbp26 +++ mktemp ++ local LAST_ERR=/tmp/tmp.53eyw8Eji2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SLFQQjbp26 ++ cat /tmp/tmp.53eyw8Eji2 ++ rm /tmp/tmp.SLFQQjbp26 /tmp/tmp.53eyw8Eji2 ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.Kk1iAuZ59H ++ mktemp + local LAST_ERR=/tmp/tmp.Cmo2cj8wL5 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Kk1iAuZ59H secret/my-cluster-secrets-2 configured + cat /tmp/tmp.Cmo2cj8wL5 Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
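# ---------------------------------------------------------------------------
# [editor's note] The warning above is expected: my-cluster-secrets-2 was not
# created with `kubectl apply` or `kubectl create --save-config`, so it lacks
# the kubectl.kubernetes.io/last-applied-configuration annotation; kubectl
# patches the annotation in automatically and the apply still succeeds.
# Every password rotation in this test follows one pattern: base64-encode the
# new password and merge-patch it into the cluster Secret under the user's
# key. A sketch of that pattern, reconstructed from the trace; the retry and
# tmp-file logging of the real patch_secret helper is omitted.
patch_secret() {
    local secret=$1 key=$2 new_password=$3
    local value
    value=$(echo -n "$new_password" | base64)    # "test-password" -> dGVzdC1wYXNzd29yZA==
    kubectl patch secret "$secret" -p="{\"data\":{\"${key}\": \"${value}\"}}"
}
# usage: patch_secret my-cluster-secrets-2 operator test-password2
# ---------------------------------------------------------------------------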
+ rm /tmp/tmp.Kk1iAuZ59H /tmp/tmp.Cmo2cj8wL5 + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.w7PEIXUjND +++ mktemp ++ local LAST_ERR=/tmp/tmp.hi92o3HHog ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.w7PEIXUjND ++ cat /tmp/tmp.hi92o3HHog ++ rm /tmp/tmp.w7PEIXUjND /tmp/tmp.hi92o3HHog ++ return 0 + client_pod=pxc-client-6644d8898f-zvwkh + wait_pod pxc-client-6644d8898f-zvwkh + local pod=pxc-client-6644d8898f-zvwkh + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zvwkh ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zvwkh condition met pxc-client-6644d8898f-zvwkh.Ok + set +o xtrace
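# ---------------------------------------------------------------------------
# [editor's note] compare_mysql_cmd is the suite's golden-file check: run a
# statement, capture the output to a temp file, and diff it against an
# expected .sql file, preferring a "-80" variant when the image is 8.0-based
# (hence select-4-80.sql being chosen above). A sketch under assumed variable
# names (TESTS_DIR, TMP_DIR, IMAGE are placeholders; run_mysql is the suite
# helper seen throughout this log):
compare_mysql_output() {
    local command_id=$1 query=$2 uri=$3
    local expected="$TESTS_DIR/users/compare/${command_id}.sql"
    # version-specific expected results take precedence when they exist
    if [[ $IMAGE =~ 8\.0 ]] && [ -f "${expected%.sql}-80.sql" ]; then
        expected="${expected%.sql}-80.sql"
    fi
    run_mysql "$query" "$uri" > "$TMP_DIR/${command_id}.sql"
    [ -s "$TMP_DIR/${command_id}.sql" ] || return 1    # empty output fails fast
    diff -u "$expected" "$TMP_DIR/${command_id}.sql"   # any drift fails the test
}
# usage: compare_mysql_output select-4 'SHOW TABLES;' "-h some-name-proxysql -uoperator -p'...'"
# ---------------------------------------------------------------------------
+ '[' '!'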
-s /tmp/tmp.6lxaPdqhMt/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.6lxaPdqhMt/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + local LAST_OUT=/tmp/tmp.csf2OHl8H8 + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1751-f9555a6f#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' ++ mktemp + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-32755~ + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + local LAST_ERR=/tmp/tmp.h1ccpFXxAI + local exit_status=0 + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.csf2OHl8H8 perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.h1ccpFXxAI + rm /tmp/tmp.csf2OHl8H8 /tmp/tmp.h1ccpFXxAI + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2BgCL0zKvC +++ mktemp ++ local LAST_ERR=/tmp/tmp.UnHuO3ZGqi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2BgCL0zKvC ++ cat /tmp/tmp.UnHuO3ZGqi ++ rm /tmp/tmp.2BgCL0zKvC /tmp/tmp.UnHuO3ZGqi ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.v6BNTlsHvw +++ mktemp ++ local LAST_ERR=/tmp/tmp.3Vw0SBEGbH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.v6BNTlsHvw ++ cat /tmp/tmp.3Vw0SBEGbH ++ rm /tmp/tmp.v6BNTlsHvw /tmp/tmp.3Vw0SBEGbH ++ return 0 + [[ 
initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 [... the same 20-second readiness poll repeats unchanged for iterations 1 through 11: each pass re-runs kubectl get pxc some-name -o 'jsonpath={.status.state}' through kubectl_bin with fresh mktemp capture files, finds the state still "initializing", prints 'waiting for cluster readyness', and sleeps 20 ...] + [[ 12 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XAYRp68U0L +++ mktemp ++ local LAST_ERR=/tmp/tmp.a9eJrym4zt ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++
cat /tmp/tmp.XAYRp68U0L ++ cat /tmp/tmp.a9eJrym4zt ++ rm /tmp/tmp.XAYRp68U0L /tmp/tmp.a9eJrym4zt ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 13 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JLiZ0ASSNb +++ mktemp ++ local LAST_ERR=/tmp/tmp.EgF2yb2wO1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JLiZ0ASSNb ++ cat /tmp/tmp.EgF2yb2wO1 ++ rm /tmp/tmp.JLiZ0ASSNb /tmp/tmp.EgF2yb2wO1 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zGSNktY6AP +++ mktemp ++ local LAST_ERR=/tmp/tmp.VYBpspb1h2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zGSNktY6AP ++ cat /tmp/tmp.VYBpspb1h2 ++ rm /tmp/tmp.zGSNktY6AP /tmp/tmp.VYBpspb1h2 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.auIp7sbxsD ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.qwfFhGy5Sh +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.auIp7sbxsD +++++ cat /tmp/tmp.qwfFhGy5Sh +++++ rm /tmp/tmp.auIp7sbxsD /tmp/tmp.qwfFhGy5Sh +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zJMcduevJX +++ mktemp ++ local LAST_ERR=/tmp/tmp.m0mUh9O1FI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zJMcduevJX ++ cat /tmp/tmp.m0mUh9O1FI ++ rm /tmp/tmp.zJMcduevJX /tmp/tmp.m0mUh9O1FI ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.XSmfsyMbMM ++ mktemp + local LAST_ERR=/tmp/tmp.qRlVeDufc9 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.XSmfsyMbMM secret/my-cluster-secrets patched + cat /tmp/tmp.qRlVeDufc9 + rm /tmp/tmp.XSmfsyMbMM /tmp/tmp.qRlVeDufc9 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency 
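The monitor rotation above is driven entirely by patching the users Secret; the operator watches it and reconciles the new credential into MySQL and the proxy. The same step in isolation, with the secret name, key, and value exactly as they appear in the trace:

    # Rotate the monitor password by patching the Secret's data key.
    # The operator picks up the change and updates the user everywhere.
    newpass=test-password2
    kubectl patch secret my-cluster-secrets \
      -p="{\"data\":{\"monitor\": \"$(echo -n "$newpass" | base64)\"}}"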
----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.T5XAUpqlh4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.biT4x7JCjn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.T5XAUpqlh4 ++ cat /tmp/tmp.biT4x7JCjn ++ rm /tmp/tmp.T5XAUpqlh4 /tmp/tmp.biT4x7JCjn ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mpJSjgSlGD +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZOFssCCHRk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mpJSjgSlGD ++ cat /tmp/tmp.ZOFssCCHRk ++ rm /tmp/tmp.mpJSjgSlGD /tmp/tmp.ZOFssCCHRk ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UW6ZL36Egq +++ mktemp ++ local LAST_ERR=/tmp/tmp.V1OYX1nmP4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UW6ZL36Egq ++ cat /tmp/tmp.V1OYX1nmP4 ++ rm /tmp/tmp.UW6ZL36Egq /tmp/tmp.V1OYX1nmP4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.elbLmfXBZV +++ mktemp ++ local LAST_ERR=/tmp/tmp.FZjZ2ldwDr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.elbLmfXBZV ++ cat /tmp/tmp.FZjZ2ldwDr ++ rm /tmp/tmp.elbLmfXBZV /tmp/tmp.FZjZ2ldwDr ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kQqCDQqPPQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.VfcVkhgVUP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kQqCDQqPPQ ++ cat /tmp/tmp.VfcVkhgVUP ++ rm /tmp/tmp.kQqCDQqPPQ /tmp/tmp.VfcVkhgVUP ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.8EDozajaFT ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.e73D9cPhns +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.8EDozajaFT +++++ cat /tmp/tmp.e73D9cPhns +++++ rm /tmp/tmp.8EDozajaFT /tmp/tmp.e73D9cPhns +++++ return 0 ++++ [[ 
true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aACStepxy3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.p5NcvoAIfp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aACStepxy3 ++ cat /tmp/tmp.p5NcvoAIfp ++ rm /tmp/tmp.aACStepxy3 /tmp/tmp.p5NcvoAIfp ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-3-80.sql ']' + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hyyezJ21yl +++ mktemp ++ local LAST_ERR=/tmp/tmp.kOOVBNWZDm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hyyezJ21yl ++ cat /tmp/tmp.kOOVBNWZDm ++ rm /tmp/tmp.hyyezJ21yl /tmp/tmp.kOOVBNWZDm ++ return 0 + client_pod=pxc-client-6644d8898f-zvwkh + wait_pod pxc-client-6644d8898f-zvwkh + local pod=pxc-client-6644d8898f-zvwkh + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-zvwkh ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-6644d8898f-zvwkh condition met pxc-client-6644d8898f-zvwkh.Ok + set +o xtrace + '[' '!' 
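wait_cluster_consistency only succeeds once .status.state is ready and the pxc and proxy ready counts match the requested sizes, with the proxy kind chosen by inspecting .spec.haproxy.enabled (haproxy in this run). A compact sketch of that check; the function name is illustrative, not the harness's own:

    # Succeed once the CR reports ready state and full replica counts.
    cluster_ready() {
      local name=$1 size=$2 proxy_size=$3
      [[ $(kubectl get pxc "$name" -o 'jsonpath={.status.state}') == ready ]] &&
      [[ $(kubectl get pxc "$name" -o 'jsonpath={.status.pxc.ready}') -eq $size ]] &&
      [[ $(kubectl get pxc "$name" -o 'jsonpath={.status.haproxy.ready}') -eq $proxy_size ]]
    }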
-s /tmp/tmp.6lxaPdqhMt/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1751/e2e-tests/users/compare/select-3.sql /tmp/tmp.6lxaPdqhMt/select-3.sql + destroy users-32755 + local namespace=users-32755 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + grep -v 'the object has been modified' + grep -v 'get backup status: Job.batch' ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + tee /tmp/tmp.6lxaPdqhMt/operator.log + sort -u +++ grep -c percona-xtradb-cluster-operator +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.yhoBwl6qHq +++ mktemp ++ local LAST_ERR=/tmp/tmp.7E1999EGQ8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yhoBwl6qHq ++ cat /tmp/tmp.7E1999EGQ8 ++ rm /tmp/tmp.yhoBwl6qHq /tmp/tmp.7E1999EGQ8 ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-7888f9874f-8zpx8 ++ mktemp + local LAST_OUT=/tmp/tmp.EaeJzbTPwd ++ mktemp + local LAST_ERR=/tmp/tmp.NvqvguxDG7 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-7888f9874f-8zpx8 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.EaeJzbTPwd + cat /tmp/tmp.NvqvguxDG7 + rm /tmp/tmp.EaeJzbTPwd /tmp/tmp.NvqvguxDG7 + return 0 2024-07-08T17:29:04.660Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.26.15-gke.1469001"} 2024-07-08T17:29:04.661Z INFO setup Manager starting up {"gitCommit": "f9555a6f491f6c26fd875c06770cb4529b8f4776", "gitBranch": "PR-1751-f9555a6f", "buildTime": "2024-07-08T15:33:40Z", "goVersion": "go1.22.5", "os": "linux", "arch": "amd64"} 2024-07-08T17:29:04.661Z INFO setup Registering Components. 2024-07-08T17:29:06.647Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2024-07-08T17:29:06.683Z INFO controller-runtime.metrics Starting metrics server 2024-07-08T17:29:06.683Z INFO setup Starting the Cmd. 2024-07-08T17:29:06.683Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2024-07-08T17:29:06.684Z INFO controller-runtime.certwatcher Starting certificate watcher 2024-07-08T17:29:06.684Z INFO controller-runtime.certwatcher Updated current TLS certificate 2024-07-08T17:29:06.684Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2024-07-08T17:29:06.684Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2024-07-08T17:29:06.684Z INFO controller-runtime.webhook Starting webhook server 2024-07-08T17:29:06.785Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 
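Before tearing everything down, the destroy step captures the operator log and normalizes it for diffing: known-noisy lines are filtered out, the "ts" fields and limits-* prefixes are stripped with sed so reruns compare cleanly, and the result is de-duplicated. Roughly, using the selectors and filters visible in the trace (the exact plumbing order in the harness may differ):

    # Capture and normalize the operator log for later comparison.
    operator_pod=$(kubectl get pods -n pxc-operator \
      --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator \
      -o 'jsonpath={.items[].metadata.name}')
    kubectl logs -n pxc-operator "$operator_pod" \
      | grep -v level=info \
      | grep -v 'the object has been modified' \
      | grep -v 'get backup status: Job.batch' \
      | sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' \
      | sort -u | tee /tmp/operator.log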
2024-07-08T17:29:06.873Z DEBUG events percona-xtradb-cluster-operator-7888f9874f-8zpx8_33fca153-4657-47fd-8f41-eb502150d78d became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"89848b69-66c3-4402-9b9c-d14cdd2bc8bf","apiVersion":"coordination.k8s.io/v1","resourceVersion":"61162"}, "reason": "LeaderElection"} 2024-07-08T17:29:06.873Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2024-07-08T17:29:06.873Z INFO Starting Controller {"controller": "pxc-controller"} 2024-07-08T17:29:06.873Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2024-07-08T17:29:06.873Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2024-07-08T17:29:06.873Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"} 2024-07-08T17:29:06.873Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2024-07-08T17:29:06.873Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2024-07-08T17:29:06.987Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2024-07-08T17:29:06.994Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2024-07-08T17:29:06.994Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2024-07-08T17:29:44.874Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "01eee082-b163-46bb-864d-d69426680a7a", "version": "1.15.0"} 2024-07-08T17:31:04.621Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "6fa270ef-f995-495e-a775-0bcb5458c101", "user": "operator"} 2024-07-08T17:31:04.661Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "6fa270ef-f995-495e-a775-0bcb5458c101", "user": "monitor"} 2024-07-08T17:31:04.758Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "6fa270ef-f995-495e-a775-0bcb5458c101"} 2024-07-08T17:31:04.802Z INFO monitor user privileges granted {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "6fa270ef-f995-495e-a775-0bcb5458c101"} 2024-07-08T17:31:04.840Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "6fa270ef-f995-495e-a775-0bcb5458c101", "user": "xtrabackup"} 2024-07-08T17:31:04.901Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "6fa270ef-f995-495e-a775-0bcb5458c101"} 2024-07-08T17:31:04.939Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "6fa270ef-f995-495e-a775-0bcb5458c101", "user": "replication"} 2024-07-08T17:31:05.158Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "6fa270ef-f995-495e-a775-0bcb5458c101", "err": "get primary pxc pod: not found"} 2024-07-08T17:31:09.701Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", 
"reconcileID": "4ba69d05-8b75-4415-bd59-587c7113a3c3", "err": "get primary pxc pod: not found"} 2024-07-08T17:31:14.935Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "f51e8e5a-f06f-49a6-b234-0cd34bb69d7d", "err": "get primary pxc pod: not found"} 2024-07-08T17:31:20.145Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "4d2df43f-ccdb-4ee4-b667-1615bc9225ec", "err": "get primary pxc pod: not found"} 2024-07-08T17:33:28.557Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "3a4e9b1f-788d-40e6-b578-85d5ab7dde30", "user": "root"} 2024-07-08T17:33:28.830Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "3a4e9b1f-788d-40e6-b578-85d5ab7dde30", "new version": "8.0.36-28.1"} 2024-07-08T17:33:32.219Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "3a4e9b1f-788d-40e6-b578-85d5ab7dde30"} 2024-07-08T17:33:37.015Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "4a84fe26-820d-423b-893e-2045a8614cf5"} 2024-07-08T17:33:42.403Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "9b36277b-db1d-4067-bbeb-4b269557b394"} 2024-07-08T17:33:47.952Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "526d06aa-8dbe-4348-b5ca-6724769b07e4"} 2024-07-08T17:33:53.312Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "93396e34-249d-4ee7-a1d5-de5265d8aa61"} 2024-07-08T17:33:58.635Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "12dbd587-41c0-4ded-8677-d2745c1df12d"} 2024-07-08T17:34:04.026Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "5912b9cd-5a4c-4e6f-a2db-7ba0c3bfa1ce"} 2024-07-08T17:34:09.811Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "0ad60155-1934-4b2b-a176-04e16383d192"} 2024-07-08T17:34:15.012Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "62b0d83e-30ce-4348-a38c-bdb268eb60c8"} 2024-07-08T17:34:20.523Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "07a44bfc-362c-4e97-91e9-093397ccf68d"} 2024-07-08T17:34:26.007Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "9670b32e-5865-46ff-851a-4a0d1a7260f8"} 2024-07-08T17:34:31.449Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "4bac58eb-6b32-4c34-a704-178a7237ad5c"} 2024-07-08T17:34:36.912Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", 
"reconcileID": "5eb3edd6-fb56-4c28-b8d3-b9e0f4855bc5"} 2024-07-08T17:34:38.852Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "505221fe-eb7d-4b7c-b021-5fcdf9265174", "user": "root"} 2024-07-08T17:34:38.900Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "505221fe-eb7d-4b7c-b021-5fcdf9265174", "user": "root"} 2024-07-08T17:34:38.907Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "505221fe-eb7d-4b7c-b021-5fcdf9265174", "secret": "some-name-mysql-init", "user": "root"} 2024-07-08T17:34:44.804Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "505221fe-eb7d-4b7c-b021-5fcdf9265174"} 2024-07-08T17:34:44.815Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "505221fe-eb7d-4b7c-b021-5fcdf9265174", "user": "root"} 2024-07-08T17:34:44.869Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "505221fe-eb7d-4b7c-b021-5fcdf9265174", "user": "root"} 2024-07-08T17:34:48.902Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "505221fe-eb7d-4b7c-b021-5fcdf9265174"} 2024-07-08T17:34:53.702Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "bd2ef18e-cce9-4c82-b172-ee06cfab7756"} 2024-07-08T17:34:58.465Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "7efe391c-c874-46f4-b8a6-23ac770cd839", "error": "exec syncusers: pod some-name-proxysql-2 does not have a host assigned / / ", "errorVerbose": "exec syncusers: pod some-name-proxysql-2 does not have a host assigned / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1265\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T17:35:22.734Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "73c7fc63-0f21-448c-ac64-b238384c0e2f", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in 
ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1265\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T17:35:28.024Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "1c107d7b-bf0d-46fe-9792-639067911050", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1265\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T17:35:33.236Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "59c8508a-8727-41f2-a096-45650d39361e", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1265\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T17:35:40.222Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "ff56aa9b-bba9-4694-8ec5-9df96339490e", "error": "exec syncusers: command terminated with exit code 1 / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-32755.svc.cluster.local:3306) to ProxySQL\nCannot add the user (replication). The user (replication) already exists in ProxySQL database with different hostgroup.\nAdding user to ProxySQL: operator\n / ERROR 1045 (28000) at line 1: ProxySQL Admin Error: UNIQUE constraint failed: mysql_users.username, mysql_users.frontend\nERROR (line:715) : Failed to add the user (operator) from PXC to ProxySQL database. 
\n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / \nSyncing user accounts from PXC(some-name-pxc-0.some-name-pxc.users-32755.svc.cluster.local:3306) to ProxySQL\nCannot add the user (replication). The user (replication) already exists in ProxySQL database with different hostgroup.\nAdding user to ProxySQL: operator\n / ERROR 1045 (28000) at line 1: ProxySQL Admin Error: UNIQUE constraint failed: mysql_users.username, mysql_users.frontend\nERROR (line:715) : Failed to add the user (operator) from PXC to ProxySQL database. \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1265\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T17:35:45.286Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "7bcb6f97-1cb2-4a92-85a6-ca8fc5a3b866", "user": "proxyadmin"} 2024-07-08T17:35:45.286Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "7bcb6f97-1cb2-4a92-85a6-ca8fc5a3b866", "user": "proxyadmin"} 2024-07-08T17:35:45.361Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "7bcb6f97-1cb2-4a92-85a6-ca8fc5a3b866", "user": "proxyadmin"} 2024-07-08T17:35:45.374Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "7bcb6f97-1cb2-4a92-85a6-ca8fc5a3b866", "user": "proxyadmin"} 2024-07-08T17:35:45.374Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "7bcb6f97-1cb2-4a92-85a6-ca8fc5a3b866", "last-applied-secret": "1a55867d0f4fa0d0643653b2eb57b2f690315d8d843a10599c9839c2a779a95f"} 2024-07-08T17:35:45.680Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "7bcb6f97-1cb2-4a92-85a6-ca8fc5a3b866", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1265\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T17:36:13.022Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "6c50cdf7-960c-4b48-8d28-d808dc3f6c65"} 2024-07-08T17:36:23.333Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "ec4f88a2-9ee8-44be-9783-a8ef32c303d2"} 2024-07-08T17:36:28.950Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "697188b1-1b96-41ce-a56a-47c4ee75bd7c"} 2024-07-08T17:36:34.626Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "2bf00f8c-e944-4092-806d-f8c9adf25a39"} 2024-07-08T17:36:40.087Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "13b204d8-bd5d-44ed-9872-0c132fea6226", "user": "xtrabackup"} 2024-07-08T17:36:40.117Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "13b204d8-bd5d-44ed-9872-0c132fea6226", "user": "xtrabackup"} 2024-07-08T17:36:40.130Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "13b204d8-bd5d-44ed-9872-0c132fea6226", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-07-08T17:36:40.144Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "13b204d8-bd5d-44ed-9872-0c132fea6226", "user": "xtrabackup"} 2024-07-08T17:36:40.178Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "13b204d8-bd5d-44ed-9872-0c132fea6226", "user": "xtrabackup"} 2024-07-08T17:36:40.196Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "13b204d8-bd5d-44ed-9872-0c132fea6226", "last-applied-secret": "d11bcc70ca53ea410cd455e13d82e7225917b0861d9b677ed34c02c5f6fbaf1b"} 2024-07-08T17:36:40.333Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "3dab5fd3-6654-48ad-a598-0f7ea6698a88"} 2024-07-08T17:37:38.879Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "8e03a768-72f8-4b2c-8589-664f4d3db9c5", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-32755 on 10.25.224.10:53: no such host"} 2024-07-08T17:37:44.185Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": 
"721de453-c9a0-4ee9-8e0d-e5c930ec5024", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp 10.27.74.28:33062: connect: connection refused"} 2024-07-08T17:38:32.185Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "53e9eba8-f780-4c12-a7a2-5efde0601e7e", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.27.73.30:33062: connect: connection refused"} 2024-07-08T17:38:37.407Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "a21e1ae5-f7bc-4192-90c5-787f0b49936b", "primary name": "some-name-pxc-0.some-name-pxc.users-32755.svc.cluster.local"} 2024-07-08T17:38:42.621Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "98561ee8-5a4d-4e60-9078-a4982cd4f87b", "primary name": "some-name-pxc-0.some-name-pxc.users-32755.svc.cluster.local"} 2024-07-08T17:38:47.877Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "5d9a4c74-780b-47a2-a4fe-b6d08e76614c", "primary name": "some-name-pxc-0.some-name-pxc.users-32755.svc.cluster.local"} 2024-07-08T17:38:53.082Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "496ee106-b811-49cf-be21-b18e3a3e97ef", "primary name": "some-name-pxc-0.some-name-pxc.users-32755.svc.cluster.local"} 2024-07-08T17:38:58.262Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "9fa161ab-316b-4c04-ad65-c03b5759d8f5", "primary name": "some-name-pxc-0.some-name-pxc.users-32755.svc.cluster.local"} 2024-07-08T17:39:23.228Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "1baac5f5-e1ac-4d19-96c1-9a64856066a4"} 2024-07-08T17:39:28.130Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "28576386-2dec-4bd9-9206-354dd9c703ef"} 2024-07-08T17:39:33.534Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "525416af-fe83-4223-b96f-8c5f92fd8fc9"} 2024-07-08T17:39:35.426Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "815d678a-0730-40d0-b573-026fe7954939", "user": "monitor"} 2024-07-08T17:39:35.457Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "815d678a-0730-40d0-b573-026fe7954939", "user": "monitor"} 2024-07-08T17:39:35.777Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "815d678a-0730-40d0-b573-026fe7954939", "secret": "some-name-mysql-init", "user": "monitor"} 2024-07-08T17:39:35.826Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "815d678a-0730-40d0-b573-026fe7954939", "user": "monitor"} 2024-07-08T17:39:36.068Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "815d678a-0730-40d0-b573-026fe7954939", "user": "monitor"} 2024-07-08T17:39:36.602Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "815d678a-0730-40d0-b573-026fe7954939", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"} 2024-07-08T17:39:39.259Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "815d678a-0730-40d0-b573-026fe7954939", "error": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1265\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T17:40:35.364Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "05d0db09-f6fc-4ca7-985c-f86d19b1cd24", "user": "monitor"} 2024-07-08T17:40:36.478Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "05d0db09-f6fc-4ca7-985c-f86d19b1cd24", "user": 
"monitor"} 2024-07-08T17:40:36.508Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "05d0db09-f6fc-4ca7-985c-f86d19b1cd24", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"} 2024-07-08T17:40:39.820Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "05d0db09-f6fc-4ca7-985c-f86d19b1cd24"} 2024-07-08T17:40:43.737Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "76ad8aed-19f5-4229-ad17-7b952593a741"} 2024-07-08T17:40:49.235Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "d94c51b7-1b07-4a18-9beb-ed6d705f477f"} 2024-07-08T17:40:55.360Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "617dd8c8-21d7-4e17-872e-ad0c75242c03"} 2024-07-08T17:41:00.613Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "b118953d-27b6-4bdc-8601-60002a0dbf56"} 2024-07-08T17:41:05.730Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "63938360-9ddc-4d53-b0ac-4683ec25a509"} 2024-07-08T17:41:07.712Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "390768f9-66ce-4958-874c-5c9e7e8e5a20", "user": "operator"} 2024-07-08T17:41:07.744Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "390768f9-66ce-4958-874c-5c9e7e8e5a20", "user": "operator"} 2024-07-08T17:41:07.753Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "390768f9-66ce-4958-874c-5c9e7e8e5a20", "secret": "some-name-mysql-init", "user": "operator"} 2024-07-08T17:41:07.761Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "390768f9-66ce-4958-874c-5c9e7e8e5a20", "user": "operator"} 2024-07-08T17:41:07.798Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "390768f9-66ce-4958-874c-5c9e7e8e5a20", "user": "operator"} 2024-07-08T17:41:07.841Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "390768f9-66ce-4958-874c-5c9e7e8e5a20", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-07-08T17:41:09.101Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "390768f9-66ce-4958-874c-5c9e7e8e5a20", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-32755.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-32755.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-32755.svc.' 
(using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-32755.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-32755.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-32755.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1265\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T17:42:04.128Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "da89b34f-987e-4e77-beb9-0b810912be59"} 2024-07-08T17:42:12.937Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "3b148d05-706d-490f-a6a8-65716a7e9c96"} 2024-07-08T17:42:18.410Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "82a17948-69d9-4231-af73-c875838a135f"} 2024-07-08T17:42:23.735Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "88e9828c-e175-415e-9bf0-277e137d2470"} 2024-07-08T17:42:28.204Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "e399ffb8-a995-42fd-9d50-d887c10a4a1c", "secrets": "my-cluster-secrets-2"} 2024-07-08T17:42:28.216Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "e399ffb8-a995-42fd-9d50-d887c10a4a1c", "user": "root"} 2024-07-08T17:42:28.268Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "e399ffb8-a995-42fd-9d50-d887c10a4a1c", "user": "root"} 2024-07-08T17:42:28.276Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "e399ffb8-a995-42fd-9d50-d887c10a4a1c", "secret": "some-name-mysql-init", "user": "root"} 2024-07-08T17:42:29.794Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "042db78b-1ecc-4248-8d17-8f085fe4aa28"} 2024-07-08T17:42:32.429Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "e399ffb8-a995-42fd-9d50-d887c10a4a1c"} 2024-07-08T17:42:32.441Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "e399ffb8-a995-42fd-9d50-d887c10a4a1c", "user": "root"} 2024-07-08T17:42:32.490Z INFO Old password discarded {"controller": "pxc-controller", "namespace": 
"users-32755", "name": "some-name", "reconcileID": "e399ffb8-a995-42fd-9d50-d887c10a4a1c", "user": "root"} 2024-07-08T17:42:32.504Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "e399ffb8-a995-42fd-9d50-d887c10a4a1c", "user": "operator"} 2024-07-08T17:42:32.542Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "e399ffb8-a995-42fd-9d50-d887c10a4a1c", "user": "operator"} 2024-07-08T17:42:32.552Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "e399ffb8-a995-42fd-9d50-d887c10a4a1c", "secret": "some-name-mysql-init", "user": "operator"} 2024-07-08T17:42:32.565Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "e399ffb8-a995-42fd-9d50-d887c10a4a1c", "user": "operator"} 2024-07-08T17:42:32.594Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "e399ffb8-a995-42fd-9d50-d887c10a4a1c", "user": "operator"} 2024-07-08T17:42:32.610Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "e399ffb8-a995-42fd-9d50-d887c10a4a1c", "user": "monitor"} 2024-07-08T17:42:32.641Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "e399ffb8-a995-42fd-9d50-d887c10a4a1c", "user": "monitor"} 2024-07-08T17:42:32.653Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "e399ffb8-a995-42fd-9d50-d887c10a4a1c", "secret": "some-name-mysql-init", "user": "monitor"} 2024-07-08T17:42:32.698Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "e399ffb8-a995-42fd-9d50-d887c10a4a1c", "user": "monitor"} 2024-07-08T17:42:32.711Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "e399ffb8-a995-42fd-9d50-d887c10a4a1c", "user": "monitor"} 2024-07-08T17:42:32.790Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "e399ffb8-a995-42fd-9d50-d887c10a4a1c", "user": "xtrabackup"} 2024-07-08T17:42:32.823Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "e399ffb8-a995-42fd-9d50-d887c10a4a1c", "user": "xtrabackup"} 2024-07-08T17:42:32.834Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "e399ffb8-a995-42fd-9d50-d887c10a4a1c", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-07-08T17:42:32.846Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "e399ffb8-a995-42fd-9d50-d887c10a4a1c", "user": "xtrabackup"} 2024-07-08T17:42:32.877Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "e399ffb8-a995-42fd-9d50-d887c10a4a1c", "user": "xtrabackup"} 2024-07-08T17:42:32.890Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": 
"e399ffb8-a995-42fd-9d50-d887c10a4a1c", "user": "replication"} 2024-07-08T17:42:32.919Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "e399ffb8-a995-42fd-9d50-d887c10a4a1c", "user": "replication"} 2024-07-08T17:42:32.929Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "e399ffb8-a995-42fd-9d50-d887c10a4a1c", "secret": "some-name-mysql-init", "user": "replication"} 2024-07-08T17:42:32.943Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "e399ffb8-a995-42fd-9d50-d887c10a4a1c", "user": "replication"} 2024-07-08T17:42:32.980Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "e399ffb8-a995-42fd-9d50-d887c10a4a1c", "user": "replication"} 2024-07-08T17:42:32.980Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "e399ffb8-a995-42fd-9d50-d887c10a4a1c", "user": "proxyadmin"} 2024-07-08T17:42:33.024Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "e399ffb8-a995-42fd-9d50-d887c10a4a1c", "user": "proxyadmin"} 2024-07-08T17:42:33.035Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "e399ffb8-a995-42fd-9d50-d887c10a4a1c", "user": "proxyadmin"} 2024-07-08T17:42:33.035Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "e399ffb8-a995-42fd-9d50-d887c10a4a1c", "last-applied-secret": "14a131e0f6831fdc7f5057c6a785f08b8b5c6365aa958e60b001ae0ff146033d"} 2024-07-08T17:42:33.035Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "e399ffb8-a995-42fd-9d50-d887c10a4a1c", "last-applied-secret": "14a131e0f6831fdc7f5057c6a785f08b8b5c6365aa958e60b001ae0ff146033d"} 2024-07-08T17:42:33.263Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "e399ffb8-a995-42fd-9d50-d887c10a4a1c", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1265\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T17:42:33.596Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "2f0f2ea9-d7da-417f-8bfd-4f9e643cfade", "user": "monitor"} 2024-07-08T17:42:33.910Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "2f0f2ea9-d7da-417f-8bfd-4f9e643cfade", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1265\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T17:43:31.398Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "ffaee6a1-52ae-453c-9a95-952c45aec970", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-32755 on 10.25.224.10:53: no such host"} 2024-07-08T17:44:08.516Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "6be2a2b5-468f-4277-bbb0-a7477805cf2b", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-32755 on 10.25.224.10:53: no such host"} 2024-07-08T17:44:13.803Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "549b7b08-cb94-42da-9ec5-eb719c8fa913", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-32755 on 10.25.224.10:53: no such host"} 2024-07-08T17:44:19.059Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": 
"e656c75d-2f28-4c4c-a06a-37c0ab5b4112", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-32755 on 10.25.224.10:53: no such host"} 2024-07-08T17:44:29.502Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "854c75ce-b03a-4704-95c2-3432ade122f1", "primary name": "some-name-pxc-0.some-name-pxc.users-32755.svc.cluster.local"} 2024-07-08T17:44:34.689Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "5ee02b3d-c463-4c97-81cd-6b44ac051ae4", "primary name": "some-name-pxc-0.some-name-pxc.users-32755.svc.cluster.local"} 2024-07-08T17:44:39.862Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "483db261-1f62-497b-81d7-484aa8edd0f8", "primary name": "some-name-pxc-0.some-name-pxc.users-32755.svc.cluster.local"} 2024-07-08T17:44:50.453Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "6be76e93-17e2-4c01-9821-949197e34013", "primary name": "some-name-pxc-0.some-name-pxc.users-32755.svc.cluster.local"} 2024-07-08T17:44:56.737Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "ad4a4791-0dd7-46f7-b89a-83eef8f4dcec", "user": "monitor"} 2024-07-08T17:44:56.975Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "ad4a4791-0dd7-46f7-b89a-83eef8f4dcec", "user": "monitor"} 2024-07-08T17:44:57.001Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "ad4a4791-0dd7-46f7-b89a-83eef8f4dcec", "last-applied-secret": "14a131e0f6831fdc7f5057c6a785f08b8b5c6365aa958e60b001ae0ff146033d"} 2024-07-08T17:45:00.362Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "ad4a4791-0dd7-46f7-b89a-83eef8f4dcec"} 2024-07-08T17:45:05.022Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "21056a95-c408-417b-aafe-e0daaeb7a048"} 2024-07-08T17:45:10.608Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "7645d71e-8bf1-42a5-aa06-77079d83482d"} 2024-07-08T17:45:15.741Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "801a91ba-ae39-4931-bbc7-349c1ed85f5e"} 2024-07-08T17:45:17.832Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "bf7a2d35-cb2a-499a-86de-e5bf6d87efb3", "user": "operator"} 2024-07-08T17:45:17.864Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "bf7a2d35-cb2a-499a-86de-e5bf6d87efb3", "user": "operator"} 2024-07-08T17:45:17.876Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-32755", "name": 
"some-name", "reconcileID": "bf7a2d35-cb2a-499a-86de-e5bf6d87efb3", "secret": "some-name-mysql-init", "user": "operator"} 2024-07-08T17:45:17.887Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "bf7a2d35-cb2a-499a-86de-e5bf6d87efb3", "user": "operator"} 2024-07-08T17:45:17.921Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "bf7a2d35-cb2a-499a-86de-e5bf6d87efb3", "user": "operator"} 2024-07-08T17:45:17.961Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "bf7a2d35-cb2a-499a-86de-e5bf6d87efb3", "last-applied-secret": "20fb0ead502d8774ade20b32d62cc9039e34472ee21c7411645357c736cff282"} 2024-07-08T17:45:19.214Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "bf7a2d35-cb2a-499a-86de-e5bf6d87efb3", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-32755.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-32755.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-32755.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-32755.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-32755.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-32755.svc.' 
(using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1265\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T17:45:56.561Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "d212faa3-941c-4615-8b75-93381f1b598d"} 2024-07-08T17:46:05.144Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "7f4789c6-f24c-495f-9fef-84b4adc1290b"} 2024-07-08T17:46:10.955Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "4ebad804-d92a-40f9-940f-9cb4d7b2c74d"} 2024-07-08T17:46:16.137Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "d7a41a96-3338-4589-894b-02cf40b67eee"} 2024-07-08T17:46:21.621Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "7230bfd3-08e5-453d-80a2-9c3f7c92031c"} 2024-07-08T17:46:26.719Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "31a841c1-486a-46c1-8d83-49ce0ac0d28c"} 2024-07-08T17:46:32.942Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "f1f5efd4-1190-4b6b-8836-59713f37702c"} 2024-07-08T17:46:38.258Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "8599842b-a987-4793-9f90-92decd98e854"} 2024-07-08T17:46:42.820Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "217676fb-9c44-4225-bbc8-289eba1abd85"} 2024-07-08T17:46:48.221Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "20bbcd3f-7b37-4f60-9fa9-7435ed047809"} 2024-07-08T17:46:54.323Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "de66d02d-07d7-4d02-ba7e-ed9cd9f4cb67"} 2024-07-08T17:47:00.320Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "4e87664c-4420-4092-8023-24889d1749a7"} 2024-07-08T17:47:05.744Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "3908b052-f545-4dfa-86ea-908eccba9cca"} 2024-07-08T17:47:11.023Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "8e82a5b6-fcd0-4b62-a436-8ec550b3bb46"} 2024-07-08T17:47:16.416Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": 
"720ecfc9-78f0-4168-9c2f-81f5438a3267"} 2024-07-08T17:47:18.320Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "130072fa-69fe-4813-9e24-06ec753edc66", "user": "root"} 2024-07-08T17:47:18.365Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "130072fa-69fe-4813-9e24-06ec753edc66", "user": "root"} 2024-07-08T17:47:18.386Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "130072fa-69fe-4813-9e24-06ec753edc66", "secret": "some-name-mysql-init", "user": "root"} 2024-07-08T17:47:23.471Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "130072fa-69fe-4813-9e24-06ec753edc66"} 2024-07-08T17:47:23.485Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "130072fa-69fe-4813-9e24-06ec753edc66", "user": "root"} 2024-07-08T17:47:23.534Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "130072fa-69fe-4813-9e24-06ec753edc66", "user": "root"} 2024-07-08T17:47:23.562Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "130072fa-69fe-4813-9e24-06ec753edc66", "user": "monitor"} 2024-07-08T17:47:23.599Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "130072fa-69fe-4813-9e24-06ec753edc66", "user": "monitor"} 2024-07-08T17:47:23.608Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "130072fa-69fe-4813-9e24-06ec753edc66", "secret": "some-name-mysql-init", "user": "monitor"} 2024-07-08T17:47:23.651Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "130072fa-69fe-4813-9e24-06ec753edc66", "user": "monitor"} 2024-07-08T17:47:23.663Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "130072fa-69fe-4813-9e24-06ec753edc66", "user": "monitor"} 2024-07-08T17:47:23.742Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "130072fa-69fe-4813-9e24-06ec753edc66", "user": "xtrabackup"} 2024-07-08T17:47:23.776Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "130072fa-69fe-4813-9e24-06ec753edc66", "user": "xtrabackup"} 2024-07-08T17:47:23.789Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "130072fa-69fe-4813-9e24-06ec753edc66", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-07-08T17:47:23.798Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "130072fa-69fe-4813-9e24-06ec753edc66", "user": "xtrabackup"} 2024-07-08T17:47:23.832Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "130072fa-69fe-4813-9e24-06ec753edc66", "user": "xtrabackup"} 2024-07-08T17:47:23.850Z INFO Password changed, updating 
user {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "130072fa-69fe-4813-9e24-06ec753edc66", "user": "proxyadmin"} 2024-07-08T17:47:23.895Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "130072fa-69fe-4813-9e24-06ec753edc66", "user": "proxyadmin"} 2024-07-08T17:47:23.909Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "130072fa-69fe-4813-9e24-06ec753edc66", "user": "proxyadmin"} 2024-07-08T17:47:23.909Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "130072fa-69fe-4813-9e24-06ec753edc66", "last-applied-secret": "8bd9e594649eefe8d2d512c958cca8de7e25b75bd50d73ae9218606c3ead6098"} 2024-07-08T17:47:23.909Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "130072fa-69fe-4813-9e24-06ec753edc66", "last-applied-secret": "8bd9e594649eefe8d2d512c958cca8de7e25b75bd50d73ae9218606c3ead6098"} 2024-07-08T17:47:24.086Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "130072fa-69fe-4813-9e24-06ec753edc66", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1265\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-08T17:47:41.005Z INFO KubeAPIWarningLogger .metadata.ownerReferences contains duplicate entries; API server dedups owner references in 1.20+, and may reject such requests as early as 1.24; please fix your requests; duplicate UID(s) observed: 30451058-90f2-4a70-8225-e1df6f4ed657 2024-07-08T17:49:47.813Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "3878c655-56c2-475a-9741-b0bab557a4b5", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-32755 on 10.25.224.10:53: no such host"} 2024-07-08T17:49:58.073Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "77c5eee4-864e-4f29-b4fd-66c21c3bcce5", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-32755 on 10.25.224.10:53: no such host"} 2024-07-08T17:50:34.811Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "19fddb04-f538-4aac-be42-94b454cf939f", "user": "root"} 2024-07-08T17:50:34.856Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "19fddb04-f538-4aac-be42-94b454cf939f", "user": "root"} 2024-07-08T17:50:34.872Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "19fddb04-f538-4aac-be42-94b454cf939f", "secret": "some-name-mysql-init", "user": "root"} 2024-07-08T17:50:34.882Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "19fddb04-f538-4aac-be42-94b454cf939f", "user": "root"} 2024-07-08T17:50:34.930Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "19fddb04-f538-4aac-be42-94b454cf939f", "user": "root"} 2024-07-08T17:50:34.941Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "19fddb04-f538-4aac-be42-94b454cf939f", "user": "operator"} 2024-07-08T17:50:34.977Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "19fddb04-f538-4aac-be42-94b454cf939f", "user": "operator"} 2024-07-08T17:50:34.992Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "19fddb04-f538-4aac-be42-94b454cf939f", "secret": "some-name-mysql-init", "user": "operator"} 2024-07-08T17:50:35.023Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": 
"19fddb04-f538-4aac-be42-94b454cf939f", "user": "operator"} 2024-07-08T17:50:35.052Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "19fddb04-f538-4aac-be42-94b454cf939f", "user": "operator"} 2024-07-08T17:50:35.066Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "19fddb04-f538-4aac-be42-94b454cf939f", "user": "monitor"} 2024-07-08T17:50:35.094Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "19fddb04-f538-4aac-be42-94b454cf939f", "user": "monitor"} 2024-07-08T17:50:35.105Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "19fddb04-f538-4aac-be42-94b454cf939f", "secret": "some-name-mysql-init", "user": "monitor"} 2024-07-08T17:50:35.117Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "19fddb04-f538-4aac-be42-94b454cf939f", "user": "monitor"} 2024-07-08T17:50:35.242Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "19fddb04-f538-4aac-be42-94b454cf939f", "user": "xtrabackup"} 2024-07-08T17:50:35.275Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "19fddb04-f538-4aac-be42-94b454cf939f", "user": "xtrabackup"} 2024-07-08T17:50:35.284Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "19fddb04-f538-4aac-be42-94b454cf939f", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-07-08T17:50:35.295Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "19fddb04-f538-4aac-be42-94b454cf939f", "user": "xtrabackup"} 2024-07-08T17:50:35.330Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "19fddb04-f538-4aac-be42-94b454cf939f", "user": "xtrabackup"} 2024-07-08T17:50:35.343Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "19fddb04-f538-4aac-be42-94b454cf939f", "user": "replication"} 2024-07-08T17:50:35.376Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "19fddb04-f538-4aac-be42-94b454cf939f", "user": "replication"} 2024-07-08T17:50:35.387Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "19fddb04-f538-4aac-be42-94b454cf939f", "secret": "some-name-mysql-init", "user": "replication"} 2024-07-08T17:50:35.402Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "19fddb04-f538-4aac-be42-94b454cf939f", "user": "replication"} 2024-07-08T17:50:35.428Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "19fddb04-f538-4aac-be42-94b454cf939f", "user": "replication"} 2024-07-08T17:50:35.428Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "19fddb04-f538-4aac-be42-94b454cf939f", 
"last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-07-08T17:50:35.428Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "19fddb04-f538-4aac-be42-94b454cf939f", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-07-08T17:51:33.723Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "3b3df0fc-3368-4ae8-ba7a-9c9933aa152d", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp 10.27.74.37:33062: connect: connection refused"} 2024-07-08T17:52:52.828Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "fac0a057-6ec2-4640-9297-79bd7c194e5b", "user": "monitor"} 2024-07-08T17:52:53.076Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "fac0a057-6ec2-4640-9297-79bd7c194e5b", "user": "monitor"} 2024-07-08T17:52:53.101Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "fac0a057-6ec2-4640-9297-79bd7c194e5b", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-07-08T17:53:03.154Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "fa60cca6-b43a-4a17-a2fa-b8730b807c93", "user": "monitor"} 2024-07-08T17:53:03.184Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "fa60cca6-b43a-4a17-a2fa-b8730b807c93", "user": "monitor"} 2024-07-08T17:53:03.198Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "fa60cca6-b43a-4a17-a2fa-b8730b807c93", "secret": "some-name-mysql-init", "user": "monitor"} 2024-07-08T17:53:03.208Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "fa60cca6-b43a-4a17-a2fa-b8730b807c93", "user": "monitor"} 2024-07-08T17:53:03.337Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "fa60cca6-b43a-4a17-a2fa-b8730b807c93", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"} 2024-07-08T17:54:08.635Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "4849c5e3-8e7f-4c0c-9a66-e11d675210b0", "user": "monitor"} 2024-07-08T17:54:13.649Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "1c071102-93f3-47c2-a4c4-2acbd81557e0", "user": "monitor"} 2024-07-08T17:54:13.887Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "1c071102-93f3-47c2-a4c4-2acbd81557e0", "user": "monitor"} 2024-07-08T17:54:13.913Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-32755", "name": "some-name", "reconcileID": "1c071102-93f3-47c2-a4c4-2acbd81557e0", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"} 
github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1
	/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1267
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).reconcileHandler
	/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:324
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).processNextWorkItem
	/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:261
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Start.func2.2
	/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:222
[mysql] 2024/07/08 17:38:32 connection.go:49: read tcp 10.27.74.25:41604->10.27.73.30:33062: read: connection reset by peer
+ kubectl get pxc --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ kubectl patch pxc -n users-32755 some-name --type=merge -p '{"metadata":{"finalizers":[]}}'
perconaxtradbcluster.pxc.percona.com/some-name patched
+ kubectl_bin delete pxc --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.GtCcZzH2Cg
++ mktemp
+ local LAST_ERR=/tmp/tmp.hSrRpEBrBW
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.GtCcZzH2Cg
perconaxtradbcluster.pxc.percona.com "some-name" deleted
+ cat /tmp/tmp.hSrRpEBrBW
+ rm /tmp/tmp.GtCcZzH2Cg /tmp/tmp.hSrRpEBrBW
+ return 0
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.FLmRueGjYA
++ mktemp
+ local LAST_ERR=/tmp/tmp.KUfSSUvyHL
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.FLmRueGjYA
No resources found
+ cat /tmp/tmp.KUfSSUvyHL
+ rm /tmp/tmp.FLmRueGjYA /tmp/tmp.KUfSSUvyHL
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.HLPSkKRp2V
++ mktemp
+ local LAST_ERR=/tmp/tmp.UCiMw8bpwe
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.HLPSkKRp2V
No resources found
+ cat /tmp/tmp.UCiMw8bpwe
+ rm /tmp/tmp.HLPSkKRp2V /tmp/tmp.UCiMw8bpwe
+ return 0
+ kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
++ mktemp
+ local LAST_OUT=/tmp/tmp.2r8bg1IRoa
++ mktemp
+ local LAST_ERR=/tmp/tmp.ImAke73TOx
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.2r8bg1IRoa
validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted
+ cat /tmp/tmp.ImAke73TOx
+ rm /tmp/tmp.2r8bg1IRoa /tmp/tmp.ImAke73TOx
+ return 0
+ kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml
+ :
+ '[' '!' -z '' ']'
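[editor's note] From the expansions in this trace, kubectl_bin is a retry wrapper around kubectl that captures stdout/stderr in mktemp files, retries up to three times, then replays the captured output. A reconstruction consistent with the trace (the sleep between failed attempts is an assumption; only the comparison and break are visible above):

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ "$exit_status" != 0 ]; then
                sleep "$i"   # assumed backoff; not shown in the trace
            else
                break
            fi
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR" >&2
        rm "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }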
+ '[' -n pxc-operator ']'
+ kubectl_bin delete --grace-period=0 --force=true namespace users-32755
+ rm -rf /tmp/tmp.6lxaPdqhMt
+ kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator
++ mktemp
++ mktemp
+ local LAST_OUT=/tmp/tmp.3rn0g7dnPr
++ mktemp
+ desc 'test passed'
+ local LAST_OUT=/tmp/tmp.w0rFA0hPQa
+ set +o xtrace
-----------------------------------------------------------------------------------
test passed
-----------------------------------------------------------------------------------
++ mktemp
+ local LAST_ERR=/tmp/tmp.Dh6OHgWT4L
+ local exit_status=0
++ seq 0 2
+ local LAST_ERR=/tmp/tmp.HPCEVo8nXG
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace users-32755
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace pxc-operator
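[editor's note] The interleaved mktemp/LAST_OUT lines above come from the two namespace deletions running concurrently; the harness presumably backgrounds both and waits, along these lines (a sketch: the & and wait are inferred from the interleaving, not visible in the trace):

    kubectl_bin delete --grace-period=0 --force=true namespace users-32755 &
    kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator &
    wait   # inferred: block until both forced deletions return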