Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/logs/users-8-0.log WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 + create_infra users-23355 + local ns=users-23355 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-28480 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.HbvTS5sOdw ++ mktemp + local LAST_ERR=/tmp/tmp.K4lnrJ43bK + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.HbvTS5sOdw perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.K4lnrJ43bK + rm /tmp/tmp.HbvTS5sOdw /tmp/tmp.K4lnrJ43bK + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.500W23d8Yu ++ mktemp + local LAST_ERR=/tmp/tmp.IJbDjWy77x + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.500W23d8Yu No resources found + cat /tmp/tmp.IJbDjWy77x + rm /tmp/tmp.500W23d8Yu /tmp/tmp.IJbDjWy77x + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.rnfCs1nP4k ++ mktemp + local LAST_ERR=/tmp/tmp.fpaDpTeBAH + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.rnfCs1nP4k No resources found + cat /tmp/tmp.fpaDpTeBAH + rm /tmp/tmp.rnfCs1nP4k /tmp/tmp.fpaDpTeBAH + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ awk '-F ' '{print $2}' ++ tail -n1 ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified 
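Note: every kubectl call in this log goes through the same capture-and-retry wrapper (kubectl_bin). The exact function body is not in the log, so the following is only a minimal sketch reconstructed from the trace: temp files for stdout/stderr, up to three attempts, both streams echoed back, last exit status returned.
# Sketch (assumption): retry wrapper matching the pattern seen in the trace.
kubectl_bin() {
	local LAST_OUT LAST_ERR exit_status=0 i
	LAST_OUT=$(mktemp)
	LAST_ERR=$(mktemp)
	for i in $(seq 0 2); do
		set +e
		kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
		exit_status=$?
		set -e
		[ "$exit_status" != 0 ] || break   # success: stop retrying
		sleep 0                            # the trace shows "sleep 0" between attempts
	done
	cat "$LAST_OUT"
	cat "$LAST_ERR"
	rm "$LAST_OUT" "$LAST_ERR"
	return $exit_status
}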
+ : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator + kubectl_bin get ns ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.FtcIbbPwiv ++ mktemp + local LAST_OUT=/tmp/tmp.xQIqo8krJB ++ mktemp + local LAST_ERR=/tmp/tmp.xdVVW9ZdZD + local exit_status=0 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.r8LYoRack8 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.xQIqo8krJB + cat /tmp/tmp.r8LYoRack8 + rm /tmp/tmp.xQIqo8krJB /tmp/tmp.r8LYoRack8 + return 0 namespace "users-28480" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.FtcIbbPwiv namespace "pxc-operator" deleted + cat /tmp/tmp.xdVVW9ZdZD + rm /tmp/tmp.FtcIbbPwiv /tmp/tmp.xdVVW9ZdZD + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'namespace/pxc-operator - ' namespace/pxc-operator - + set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.su6nRnu0u3 ++ mktemp + local LAST_ERR=/tmp/tmp.PutizzOdPE + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.su6nRnu0u3 namespace/pxc-operator created + cat /tmp/tmp.PutizzOdPE + rm /tmp/tmp.su6nRnu0u3 /tmp/tmp.PutizzOdPE + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.PWkaeNfXMB +++ mktemp ++ local LAST_ERR=/tmp/tmp.8lNAjibjeZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PWkaeNfXMB ++ cat /tmp/tmp.8lNAjibjeZ ++ rm /tmp/tmp.PWkaeNfXMB /tmp/tmp.8lNAjibjeZ ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1743-51af0517-14-cluster7 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.DvaOXMGJfv ++ mktemp + local LAST_ERR=/tmp/tmp.LmFW7qPh0Q + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1743-51af0517-14-cluster7 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.DvaOXMGJfv Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1743-51af0517-14-cluster7" modified. 
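For context, the create_namespace helper used above for pxc-operator (and again below for users-23355) follows the same recipe each time: drop chaos-mesh leftovers, best-effort delete stale test namespaces, recreate the target namespace and point the current kube context at it. A rough sketch reconstructed from the trace; destroy_chaos_mesh and wait_for_delete bodies are elided and their internals are assumptions.
# Sketch (assumption) of the create_namespace flow visible in the trace.
create_namespace() {
	local namespace=$1
	destroy_chaos_mesh                       # remove chaos-mesh webhooks/CRDs/cluster roles if present
	kubectl_bin get ns \
		| egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' \
		| awk '{print $1}' \
		| xargs kubectl delete ns            # best-effort cleanup of old test namespaces
	kubectl_bin delete namespace "$namespace" || :
	wait_for_delete "namespace/$namespace"
	kubectl_bin create namespace "$namespace"
	kubectl_bin config set-context "$(kubectl config current-context)" --namespace="$namespace"
}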
+ cat /tmp/tmp.LmFW7qPh0Q + rm /tmp/tmp.DvaOXMGJfv /tmp/tmp.LmFW7qPh0Q + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.pwIMRuECAg ++ mktemp + local LAST_ERR=/tmp/tmp.K4atNCSbtp + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.pwIMRuECAg customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.K4atNCSbtp + rm /tmp/tmp.pwIMRuECAg /tmp/tmp.K4atNCSbtp + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.6EFKterUim ++ mktemp + local LAST_ERR=/tmp/tmp.nCNCjWC9rG + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.6EFKterUim clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.nCNCjWC9rG + rm /tmp/tmp.6EFKterUim /tmp/tmp.nCNCjWC9rG + return 0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/deploy/cw-operator.yaml + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + kubectl_bin apply -f - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-1743-51af0517^' ++ mktemp + local LAST_OUT=/tmp/tmp.hXh3tZsxCe ++ mktemp + local LAST_ERR=/tmp/tmp.XCppPu46Uh + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.hXh3tZsxCe deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.XCppPu46Uh + rm /tmp/tmp.hXh3tZsxCe /tmp/tmp.XCppPu46Uh + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.XK3d9iWU8i ++ mktemp + local LAST_ERR=/tmp/tmp.lWaTPtnMAA + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.XK3d9iWU8i pod/percona-xtradb-cluster-operator-675fc7d9f5-z6gxg condition met + cat /tmp/tmp.lWaTPtnMAA + rm /tmp/tmp.XK3d9iWU8i /tmp/tmp.lWaTPtnMAA + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.38Di0U0eCu +++ mktemp ++ local LAST_ERR=/tmp/tmp.CY0IdWYA7x ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.38Di0U0eCu ++ cat /tmp/tmp.CY0IdWYA7x ++ rm /tmp/tmp.38Di0U0eCu /tmp/tmp.CY0IdWYA7x ++ return 0 + wait_pod percona-xtradb-cluster-operator-675fc7d9f5-z6gxg 480 pxc-operator + local pod=percona-xtradb-cluster-operator-675fc7d9f5-z6gxg + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-675fc7d9f5-z6gxg ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-675fc7d9f5-z6gxg condition met percona-xtradb-cluster-operator-675fc7d9f5-z6gxg.Ok + sleep 3 + create_namespace users-23355 + local namespace=users-23355 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ awk '{print $1}' ++ grep chaos-mesh + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v 
'^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + '[' -n '' ']' + desc 'cleaned up old namespaces users-23355' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-23355 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-23355 ++ mktemp ++ mktemp + awk '{print$1}' + local LAST_OUT=/tmp/tmp.wFwgBA6ZSo ++ mktemp + local LAST_OUT=/tmp/tmp.Ebz8p2ZaxM ++ mktemp + local LAST_ERR=/tmp/tmp.ji3n2ALNEH + local exit_status=0 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.9WnSSkpJIi + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-23355 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.wFwgBA6ZSo + cat /tmp/tmp.ji3n2ALNEH + rm /tmp/tmp.wFwgBA6ZSo /tmp/tmp.ji3n2ALNEH + return 0 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-23355 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-23355 Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.Ebz8p2ZaxM + cat /tmp/tmp.9WnSSkpJIi Error from server (NotFound): namespaces "users-23355" not found + rm /tmp/tmp.Ebz8p2ZaxM /tmp/tmp.9WnSSkpJIi + return 1 + : + wait_for_delete namespace/users-23355 + local res=namespace/users-23355 + echo -n 'namespace/users-23355 - ' namespace/users-23355 - + set +o xtrace Error from server (NotFound): namespaces "users-23355" not found + desc 'create namespace users-23355' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-23355 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-23355 ++ mktemp + local LAST_OUT=/tmp/tmp.TnFMsVfPON ++ mktemp + local LAST_ERR=/tmp/tmp.6PFpCHdNGm + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-23355 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.TnFMsVfPON namespace/users-23355 created + cat /tmp/tmp.6PFpCHdNGm + rm /tmp/tmp.TnFMsVfPON /tmp/tmp.6PFpCHdNGm + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.QibPb37ggL +++ mktemp ++ local LAST_ERR=/tmp/tmp.itcVyXHYYg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QibPb37ggL ++ cat /tmp/tmp.itcVyXHYYg ++ rm /tmp/tmp.QibPb37ggL /tmp/tmp.itcVyXHYYg ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1743-51af0517-14-cluster7 --namespace=users-23355 ++ mktemp + local LAST_OUT=/tmp/tmp.sIgY5HkLww ++ mktemp + local LAST_ERR=/tmp/tmp.fOCxgy13Xi + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1743-51af0517-14-cluster7 --namespace=users-23355 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.sIgY5HkLww Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1743-51af0517-14-cluster7" modified. 
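The deploy_operator step earlier in this log applies deploy/crd.yaml with a server-side apply, rewrites the namespace in cw-rbac.yaml, and pipes cw-operator.yaml through yq and sed to disable telemetry, force DEBUG logging, raise the probe failureThreshold and pin the PR image before applying it. A condensed sketch of that customization, using the same yq/sed expressions that appear in the trace; the helper name and file path shortening are assumptions.
# Sketch: how cw-operator.yaml is customized before being applied, per the trace.
deploy_operator_sketch() {
	local src=deploy/cw-operator.yaml
	local image=perconalab/percona-xtradb-cluster-operator:PR-1743-51af0517
	cat "$src" \
		| yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - \
		| yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - \
		| sed -e "s^image: .*^image: ${image}^" \
		| sed -e 's^failureThreshold: .*^failureThreshold: 10^' \
		| kubectl_bin apply -f -
}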
+ cat /tmp/tmp.fOCxgy13Xi + rm /tmp/tmp.sIgY5HkLww /tmp/tmp.fOCxgy13Xi + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.IzrbNcmw8j ++ mktemp + local LAST_ERR=/tmp/tmp.CAGvBhh5fV + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.IzrbNcmw8j secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.CAGvBhh5fV + rm /tmp/tmp.IzrbNcmw8j /tmp/tmp.CAGvBhh5fV + return 0 + desc 'create PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.E1oAdxNpww ++ mktemp + local LAST_ERR=/tmp/tmp.yWEZlJpS8k + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.E1oAdxNpww secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.yWEZlJpS8k + rm /tmp/tmp.E1oAdxNpww /tmp/tmp.yWEZlJpS8k + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/conf/client.yml + kubectl_bin apply -f - + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + local LAST_OUT=/tmp/tmp.RcLCylm7IO + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-23355~ + /usr/bin/sed -e 
's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1743-51af0517#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' ++ mktemp + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + local LAST_ERR=/tmp/tmp.pQ7tnwcl1i + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.RcLCylm7IO deployment.apps/pxc-client created + cat /tmp/tmp.pQ7tnwcl1i + rm /tmp/tmp.RcLCylm7IO /tmp/tmp.pQ7tnwcl1i + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_OUT=/tmp/tmp.Piwl9vmRE3 + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-23355~ + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1743-51af0517#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + local LAST_ERR=/tmp/tmp.sQ05ELgfsO + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Piwl9vmRE3 perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.sQ05ELgfsO + rm /tmp/tmp.Piwl9vmRE3 /tmp/tmp.sQ05ELgfsO + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.CigTCZHBgj ++++ mktemp +++ local LAST_ERR=/tmp/tmp.VKHu2bpn3m +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set 
-e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.CigTCZHBgj +++ cat /tmp/tmp.VKHu2bpn3m +++ rm /tmp/tmp.CigTCZHBgj /tmp/tmp.VKHu2bpn3m +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.xuKt7aOTAK ++++ mktemp +++ local LAST_ERR=/tmp/tmp.MLpTUzmzhT +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.xuKt7aOTAK +++ cat /tmp/tmp.MLpTUzmzhT +++ rm /tmp/tmp.xuKt7aOTAK /tmp/tmp.MLpTUzmzhT +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-23355 ++ mktemp + local LAST_OUT=/tmp/tmp.e7g2ATT8vD ++ mktemp + local LAST_ERR=/tmp/tmp.Jnf4TudAb0 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-23355 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-23355 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-23355 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.e7g2ATT8vD + cat /tmp/tmp.Jnf4TudAb0 error: no matching resources found + rm /tmp/tmp.e7g2ATT8vD /tmp/tmp.Jnf4TudAb0 + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met some-name-proxysql-0.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met some-name-pxc-0.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 
+ local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met some-name-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met some-name-pxc-2.Ok + sleep 10 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GzUpnFSZRk +++ mktemp ++ local LAST_ERR=/tmp/tmp.es9P6FDUW9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GzUpnFSZRk ++ cat /tmp/tmp.es9P6FDUW9 ++ rm /tmp/tmp.GzUpnFSZRk /tmp/tmp.es9P6FDUW9 ++ return 0 + client_pod=pxc-client-6644d8898f-2m8j9 + wait_pod pxc-client-6644d8898f-2m8j9 + local pod=pxc-client-6644d8898f-2m8j9 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-2m8j9 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-2m8j9 condition met pxc-client-6644d8898f-2m8j9.Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5oFTv0xWZY +++ mktemp ++ local LAST_ERR=/tmp/tmp.BlXhnyC2ya ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5oFTv0xWZY ++ cat /tmp/tmp.BlXhnyC2ya ++ rm /tmp/tmp.5oFTv0xWZY /tmp/tmp.BlXhnyC2ya ++ return 0 + client_pod=pxc-client-6644d8898f-2m8j9 + wait_pod pxc-client-6644d8898f-2m8j9 + local pod=pxc-client-6644d8898f-2m8j9 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-2m8j9 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-2m8j9 condition met pxc-client-6644d8898f-2m8j9.Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h 
some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jxu4KDnMHY +++ mktemp ++ local LAST_ERR=/tmp/tmp.r491cnC99e ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jxu4KDnMHY ++ cat /tmp/tmp.r491cnC99e ++ rm /tmp/tmp.jxu4KDnMHY /tmp/tmp.r491cnC99e ++ return 0 + client_pod=pxc-client-6644d8898f-2m8j9 + wait_pod pxc-client-6644d8898f-2m8j9 + local pod=pxc-client-6644d8898f-2m8j9 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-2m8j9 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-2m8j9 condition met pxc-client-6644d8898f-2m8j9.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.ocCIpgbhE8/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-1.sql /tmp/tmp.ocCIpgbhE8/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7yRhCJ50mY +++ mktemp ++ local LAST_ERR=/tmp/tmp.YmnCpDIB3y ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7yRhCJ50mY ++ cat /tmp/tmp.YmnCpDIB3y ++ rm /tmp/tmp.7yRhCJ50mY /tmp/tmp.YmnCpDIB3y ++ return 0 + client_pod=pxc-client-6644d8898f-2m8j9 + wait_pod pxc-client-6644d8898f-2m8j9 + local pod=pxc-client-6644d8898f-2m8j9 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-2m8j9 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-2m8j9 condition met pxc-client-6644d8898f-2m8j9.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.ocCIpgbhE8/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-1.sql /tmp/tmp.ocCIpgbhE8/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-1-80.sql ']' + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.klyfY6eGsc +++ mktemp ++ local LAST_ERR=/tmp/tmp.Lav1BgcUmW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.klyfY6eGsc ++ cat /tmp/tmp.Lav1BgcUmW ++ rm /tmp/tmp.klyfY6eGsc /tmp/tmp.Lav1BgcUmW ++ return 0 + client_pod=pxc-client-6644d8898f-2m8j9 + wait_pod pxc-client-6644d8898f-2m8j9 + local pod=pxc-client-6644d8898f-2m8j9 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-2m8j9 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-2m8j9 condition met pxc-client-6644d8898f-2m8j9.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.ocCIpgbhE8/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-1.sql /tmp/tmp.ocCIpgbhE8/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DGvDruaTeW +++ mktemp ++ local LAST_ERR=/tmp/tmp.3WJJR3UCF4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DGvDruaTeW ++ cat /tmp/tmp.3WJJR3UCF4 Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.DGvDruaTeW /tmp/tmp.3WJJR3UCF4 ++ return 0 + '[' '' ']' + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.ffed9VAYhd ++ mktemp + local LAST_ERR=/tmp/tmp.cRqmQVXolK + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ffed9VAYhd secret/my-cluster-secrets patched + cat /tmp/tmp.cRqmQVXolK + rm /tmp/tmp.ffed9VAYhd /tmp/tmp.cRqmQVXolK + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zDeJXg2Dqu +++ mktemp ++ local LAST_ERR=/tmp/tmp.PHPWKKvfZG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zDeJXg2Dqu ++ cat /tmp/tmp.PHPWKKvfZG ++ rm /tmp/tmp.zDeJXg2Dqu /tmp/tmp.PHPWKKvfZG ++ return 0 + client_pod=pxc-client-6644d8898f-2m8j9 + wait_pod pxc-client-6644d8898f-2m8j9 + local pod=pxc-client-6644d8898f-2m8j9 + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-2m8j9 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-2m8j9 condition met pxc-client-6644d8898f-2m8j9.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.ocCIpgbhE8/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.ocCIpgbhE8/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.3GqZrSoVX2 ++ mktemp + local LAST_ERR=/tmp/tmp.xV7rgnUEH1 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.3GqZrSoVX2 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.xV7rgnUEH1 + rm /tmp/tmp.3GqZrSoVX2 /tmp/tmp.xV7rgnUEH1 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.P6MwMAE8zH +++ mktemp ++ local LAST_ERR=/tmp/tmp.rYJA7cuMa0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.P6MwMAE8zH ++ cat /tmp/tmp.rYJA7cuMa0 ++ rm /tmp/tmp.P6MwMAE8zH /tmp/tmp.rYJA7cuMa0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tdl5RfBq2a +++ mktemp ++ local LAST_ERR=/tmp/tmp.Xi15jkVprn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tdl5RfBq2a ++ cat /tmp/tmp.Xi15jkVprn ++ rm /tmp/tmp.tdl5RfBq2a /tmp/tmp.Xi15jkVprn ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VlwTenihN0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Zph2U9kJTv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VlwTenihN0 ++ cat /tmp/tmp.Zph2U9kJTv ++ rm /tmp/tmp.VlwTenihN0 /tmp/tmp.Zph2U9kJTv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.m2a5YgsKGs +++ mktemp ++ local LAST_ERR=/tmp/tmp.yrVlBE2gSU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.m2a5YgsKGs ++ cat 
/tmp/tmp.yrVlBE2gSU ++ rm /tmp/tmp.m2a5YgsKGs /tmp/tmp.yrVlBE2gSU ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gYm551ncIO +++ mktemp ++ local LAST_ERR=/tmp/tmp.H7YCZM0ygG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gYm551ncIO ++ cat /tmp/tmp.H7YCZM0ygG ++ rm /tmp/tmp.gYm551ncIO /tmp/tmp.H7YCZM0ygG ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MRNcFSibHW +++ mktemp ++ local LAST_ERR=/tmp/tmp.nnVvWNPsGq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MRNcFSibHW ++ cat /tmp/tmp.nnVvWNPsGq ++ rm /tmp/tmp.MRNcFSibHW /tmp/tmp.nnVvWNPsGq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8s4khjgega +++ mktemp ++ local LAST_ERR=/tmp/tmp.CmBq1eVxQM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8s4khjgega ++ cat /tmp/tmp.CmBq1eVxQM ++ rm /tmp/tmp.8s4khjgega /tmp/tmp.CmBq1eVxQM ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yB1kTBq4Hg +++ mktemp ++ local LAST_ERR=/tmp/tmp.hs7QkCNcRl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yB1kTBq4Hg ++ cat /tmp/tmp.hs7QkCNcRl ++ rm /tmp/tmp.yB1kTBq4Hg /tmp/tmp.hs7QkCNcRl ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 7 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ldoK4krHLO +++ mktemp ++ local LAST_ERR=/tmp/tmp.HucedZGdSe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ldoK4krHLO ++ cat /tmp/tmp.HucedZGdSe ++ rm /tmp/tmp.ldoK4krHLO /tmp/tmp.HucedZGdSe ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 8 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ajuiFEO8rr +++ mktemp ++ local LAST_ERR=/tmp/tmp.CPgAKreTwH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 
'!=' 0 ']' ++ break ++ cat /tmp/tmp.ajuiFEO8rr ++ cat /tmp/tmp.CPgAKreTwH ++ rm /tmp/tmp.ajuiFEO8rr /tmp/tmp.CPgAKreTwH ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CZreYLe720 +++ mktemp ++ local LAST_ERR=/tmp/tmp.KynSzDhbqM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CZreYLe720 ++ cat /tmp/tmp.KynSzDhbqM ++ rm /tmp/tmp.CZreYLe720 /tmp/tmp.KynSzDhbqM ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.sa1t6MXAui ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.9W8hBbAEUt +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.sa1t6MXAui +++++ cat /tmp/tmp.9W8hBbAEUt +++++ rm /tmp/tmp.sa1t6MXAui /tmp/tmp.9W8hBbAEUt +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.6f8qsXwCGx ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.g83VHXcxAx +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.6f8qsXwCGx +++++ cat /tmp/tmp.g83VHXcxAx +++++ rm /tmp/tmp.6f8qsXwCGx /tmp/tmp.g83VHXcxAx +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mmQvcdjFzB +++ mktemp ++ local LAST_ERR=/tmp/tmp.1rZDDZs1RM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mmQvcdjFzB ++ cat /tmp/tmp.1rZDDZs1RM ++ rm /tmp/tmp.mmQvcdjFzB /tmp/tmp.1rZDDZs1RM ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.YBDe98txVI ++ mktemp + local LAST_ERR=/tmp/tmp.Vac1QnOBE0 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.YBDe98txVI secret/my-cluster-secrets patched + cat /tmp/tmp.Vac1QnOBE0 + rm /tmp/tmp.YBDe98txVI /tmp/tmp.Vac1QnOBE0 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency 
----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fATN6EvQs6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Kw5JDueXVW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fATN6EvQs6 ++ cat /tmp/tmp.Kw5JDueXVW ++ rm /tmp/tmp.fATN6EvQs6 /tmp/tmp.Kw5JDueXVW ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6m5lVoSY2T +++ mktemp ++ local LAST_ERR=/tmp/tmp.giDanpdkgJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6m5lVoSY2T ++ cat /tmp/tmp.giDanpdkgJ ++ rm /tmp/tmp.6m5lVoSY2T /tmp/tmp.giDanpdkgJ ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xYSJSXYsaP +++ mktemp ++ local LAST_ERR=/tmp/tmp.9lPCzOb51G ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xYSJSXYsaP ++ cat /tmp/tmp.9lPCzOb51G ++ rm /tmp/tmp.xYSJSXYsaP /tmp/tmp.9lPCzOb51G ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.btiPOWYzyN ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.2nVGjXH0SP +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.btiPOWYzyN +++++ cat /tmp/tmp.2nVGjXH0SP +++++ rm /tmp/tmp.btiPOWYzyN /tmp/tmp.2nVGjXH0SP +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.PQXuJprikB ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.PyQCIokIBN +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.PQXuJprikB +++++ cat /tmp/tmp.PyQCIokIBN +++++ rm /tmp/tmp.PQXuJprikB /tmp/tmp.PyQCIokIBN +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eSayFOZ0XK +++ mktemp ++ local LAST_ERR=/tmp/tmp.WDASna2MDk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eSayFOZ0XK ++ cat /tmp/tmp.WDASna2MDk ++ rm /tmp/tmp.eSayFOZ0XK /tmp/tmp.WDASna2MDk ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd_local 
select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.ocCIpgbhE8/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-2.sql /tmp/tmp.ocCIpgbhE8/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.ocCIpgbhE8/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-2.sql /tmp/tmp.ocCIpgbhE8/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-2-80.sql ']' + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.ocCIpgbhE8/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-2.sql /tmp/tmp.ocCIpgbhE8/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.ypisZiARrw ++ mktemp + local LAST_ERR=/tmp/tmp.9INoDlecQp + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ypisZiARrw perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.9INoDlecQp + rm /tmp/tmp.ypisZiARrw /tmp/tmp.9INoDlecQp + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.KTJ2bWrBBw ++ mktemp + local LAST_ERR=/tmp/tmp.i99eWwpubr + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.KTJ2bWrBBw secret/my-cluster-secrets patched + cat /tmp/tmp.i99eWwpubr + rm /tmp/tmp.KTJ2bWrBBw /tmp/tmp.i99eWwpubr + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dwjOgHjAd1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.AGyJj1nD0H ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dwjOgHjAd1 ++ cat /tmp/tmp.AGyJj1nD0H ++ rm /tmp/tmp.dwjOgHjAd1 /tmp/tmp.AGyJj1nD0H ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zfKdbBhmkX +++ mktemp ++ local LAST_ERR=/tmp/tmp.mnV0rP15pM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zfKdbBhmkX ++ cat /tmp/tmp.mnV0rP15pM ++ rm /tmp/tmp.zfKdbBhmkX /tmp/tmp.mnV0rP15pM ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.X7wDiM2yBD +++ mktemp ++ local LAST_ERR=/tmp/tmp.Tl6mmD0rQR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 
'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.X7wDiM2yBD ++ cat /tmp/tmp.Tl6mmD0rQR ++ rm /tmp/tmp.X7wDiM2yBD /tmp/tmp.Tl6mmD0rQR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2uBst6r0Ta +++ mktemp ++ local LAST_ERR=/tmp/tmp.Xa7luNz3bC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2uBst6r0Ta ++ cat /tmp/tmp.Xa7luNz3bC ++ rm /tmp/tmp.2uBst6r0Ta /tmp/tmp.Xa7luNz3bC ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Iwy6WFemG8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.6S0U0K3t7t ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Iwy6WFemG8 ++ cat /tmp/tmp.6S0U0K3t7t ++ rm /tmp/tmp.Iwy6WFemG8 /tmp/tmp.6S0U0K3t7t ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NADVzjDE1B +++ mktemp ++ local LAST_ERR=/tmp/tmp.85kHsNmHOv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NADVzjDE1B ++ cat /tmp/tmp.85kHsNmHOv ++ rm /tmp/tmp.NADVzjDE1B /tmp/tmp.85kHsNmHOv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WxoLJoR2Ob +++ mktemp ++ local LAST_ERR=/tmp/tmp.HdnAiIjShm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WxoLJoR2Ob ++ cat /tmp/tmp.HdnAiIjShm ++ rm /tmp/tmp.WxoLJoR2Ob /tmp/tmp.HdnAiIjShm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.j7AIqvZqG7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ihxXlKRKDs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.j7AIqvZqG7 ++ cat /tmp/tmp.ihxXlKRKDs ++ rm /tmp/tmp.j7AIqvZqG7 /tmp/tmp.ihxXlKRKDs ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 7 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DshQ7FM5i1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.LxD8flro8K ++ local exit_status=0 +++ seq 0 2 ++ for i in 
'$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DshQ7FM5i1 ++ cat /tmp/tmp.LxD8flro8K ++ rm /tmp/tmp.DshQ7FM5i1 /tmp/tmp.LxD8flro8K ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uFhZYyKthK +++ mktemp ++ local LAST_ERR=/tmp/tmp.0bSXApeBYR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uFhZYyKthK ++ cat /tmp/tmp.0bSXApeBYR ++ rm /tmp/tmp.uFhZYyKthK /tmp/tmp.0bSXApeBYR ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.IZyoch59ps ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.mpxXswSe5Y +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.IZyoch59ps +++++ cat /tmp/tmp.mpxXswSe5Y +++++ rm /tmp/tmp.IZyoch59ps /tmp/tmp.mpxXswSe5Y +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.qLLXOgQ8RF ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.mJJMGblAeJ +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.qLLXOgQ8RF +++++ cat /tmp/tmp.mJJMGblAeJ +++++ rm /tmp/tmp.qLLXOgQ8RF /tmp/tmp.mJJMGblAeJ +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TYOonISaIB +++ mktemp ++ local LAST_ERR=/tmp/tmp.8CkFa9cLsC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TYOonISaIB ++ cat /tmp/tmp.8CkFa9cLsC ++ rm /tmp/tmp.TYOonISaIB /tmp/tmp.8CkFa9cLsC ++ return 0 + [[ 2 == \2 ]] + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-3-80.sql ']' + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' 
-s /tmp/tmp.ocCIpgbhE8/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-3.sql /tmp/tmp.ocCIpgbhE8/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.QfW5EfPH4V ++ mktemp + local LAST_ERR=/tmp/tmp.NPTRHNfajR + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.QfW5EfPH4V secret/my-cluster-secrets patched + cat /tmp/tmp.NPTRHNfajR + rm /tmp/tmp.QfW5EfPH4V /tmp/tmp.NPTRHNfajR + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.dIvbInLzp1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.WrayOn7U3j ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dIvbInLzp1 ++ cat /tmp/tmp.WrayOn7U3j ++ rm /tmp/tmp.dIvbInLzp1 /tmp/tmp.WrayOn7U3j ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + retry=0 + is_password_updated monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep additional_password + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3Rd2qeMZQK +++ mktemp ++ local LAST_ERR=/tmp/tmp.2t2OtL2fIK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3Rd2qeMZQK ++ cat /tmp/tmp.2t2OtL2fIK ++ rm /tmp/tmp.3Rd2qeMZQK /tmp/tmp.2t2OtL2fIK ++ return 0 + client_pod=pxc-client-6644d8898f-46xmv + wait_pod pxc-client-6644d8898f-46xmv + local pod=pxc-client-6644d8898f-46xmv + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-46xmv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-46xmv condition met pxc-client-6644d8898f-46xmv.Ok + set +o xtrace + echo 'waiting for password update' waiting for password update + sleep 1 + let retry+=1 + [[ 1 -ge 240 ]] + is_password_updated monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 
'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep additional_password + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tv8CULe9E5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZYUeonoDNV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tv8CULe9E5 ++ cat /tmp/tmp.ZYUeonoDNV ++ rm /tmp/tmp.tv8CULe9E5 /tmp/tmp.ZYUeonoDNV ++ return 0 + client_pod=pxc-client-6644d8898f-46xmv + wait_pod pxc-client-6644d8898f-46xmv + local pod=pxc-client-6644d8898f-46xmv + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-46xmv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-46xmv condition met pxc-client-6644d8898f-46xmv.Ok + set +o xtrace {"additional_password": "*C1F414D9BAF378B656A849B31F9F8AF3125F558B"} + retry=0 + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JZRxfUCbzh +++ mktemp ++ local LAST_ERR=/tmp/tmp.WN7zaIThql ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JZRxfUCbzh ++ cat /tmp/tmp.WN7zaIThql ++ rm /tmp/tmp.JZRxfUCbzh /tmp/tmp.WN7zaIThql ++ return 0 + client_pod=pxc-client-6644d8898f-46xmv + wait_pod pxc-client-6644d8898f-46xmv + local pod=pxc-client-6644d8898f-46xmv + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-46xmv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-46xmv condition met pxc-client-6644d8898f-46xmv.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 1 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gPb3U7AKN6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.4mqj1o1E6S ++ local 
exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gPb3U7AKN6 ++ cat /tmp/tmp.4mqj1o1E6S ++ rm /tmp/tmp.gPb3U7AKN6 /tmp/tmp.4mqj1o1E6S ++ return 0 + client_pod=pxc-client-6644d8898f-46xmv + wait_pod pxc-client-6644d8898f-46xmv + local pod=pxc-client-6644d8898f-46xmv + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-46xmv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-46xmv condition met pxc-client-6644d8898f-46xmv.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 2 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fpoV2YHhcp +++ mktemp ++ local LAST_ERR=/tmp/tmp.4XadSFEZEG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fpoV2YHhcp ++ cat /tmp/tmp.4XadSFEZEG ++ rm /tmp/tmp.fpoV2YHhcp /tmp/tmp.4XadSFEZEG ++ return 0 + client_pod=pxc-client-6644d8898f-46xmv + wait_pod pxc-client-6644d8898f-46xmv + local pod=pxc-client-6644d8898f-46xmv + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-46xmv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-46xmv condition met pxc-client-6644d8898f-46xmv.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 3 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.GhUR2lQBmt +++ mktemp ++ local LAST_ERR=/tmp/tmp.AqYWfdAMoe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.GhUR2lQBmt ++ cat /tmp/tmp.AqYWfdAMoe ++ rm /tmp/tmp.GhUR2lQBmt /tmp/tmp.AqYWfdAMoe ++ return 0 + client_pod=pxc-client-6644d8898f-46xmv + wait_pod pxc-client-6644d8898f-46xmv + local pod=pxc-client-6644d8898f-46xmv + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-46xmv ++ /usr/bin/sed -E 
's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-46xmv condition met pxc-client-6644d8898f-46xmv.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 4 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aNQWVLgONT +++ mktemp ++ local LAST_ERR=/tmp/tmp.VUCcS4iIDG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aNQWVLgONT ++ cat /tmp/tmp.VUCcS4iIDG ++ rm /tmp/tmp.aNQWVLgONT /tmp/tmp.VUCcS4iIDG ++ return 0 + client_pod=pxc-client-6644d8898f-46xmv + wait_pod pxc-client-6644d8898f-46xmv + local pod=pxc-client-6644d8898f-46xmv + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-46xmv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-46xmv condition met pxc-client-6644d8898f-46xmv.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 5 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lnui09hw6U +++ mktemp ++ local LAST_ERR=/tmp/tmp.1KiMIj9n9b ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lnui09hw6U ++ cat /tmp/tmp.1KiMIj9n9b ++ rm /tmp/tmp.lnui09hw6U /tmp/tmp.1KiMIj9n9b ++ return 0 + client_pod=pxc-client-6644d8898f-46xmv + wait_pod pxc-client-6644d8898f-46xmv + local pod=pxc-client-6644d8898f-46xmv + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-46xmv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-46xmv condition met pxc-client-6644d8898f-46xmv.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 6 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE 
user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.mEwlubHbfu +++ mktemp ++ local LAST_ERR=/tmp/tmp.RRZ3vhb83r ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mEwlubHbfu ++ cat /tmp/tmp.RRZ3vhb83r ++ rm /tmp/tmp.mEwlubHbfu /tmp/tmp.RRZ3vhb83r ++ return 0 + client_pod=pxc-client-6644d8898f-46xmv + wait_pod pxc-client-6644d8898f-46xmv + local pod=pxc-client-6644d8898f-46xmv + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-46xmv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-46xmv condition met pxc-client-6644d8898f-46xmv.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 7 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + grep NULL + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0fWjxfcCLp +++ mktemp ++ local LAST_ERR=/tmp/tmp.ltD7GbxuUd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0fWjxfcCLp ++ cat /tmp/tmp.ltD7GbxuUd ++ rm /tmp/tmp.0fWjxfcCLp /tmp/tmp.ltD7GbxuUd ++ return 0 + client_pod=pxc-client-6644d8898f-46xmv + wait_pod pxc-client-6644d8898f-46xmv + local pod=pxc-client-6644d8898f-46xmv + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-46xmv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-46xmv condition met pxc-client-6644d8898f-46xmv.Ok + set +o xtrace + echo 'waiting for password propagation' waiting for password propagation + sleep 1 + let retry+=1 + [[ 8 -ge 240 ]] + is_old_password_discarded monitor '-h some-name-pxc -uroot -p'\''test-password'\''' + local username=monitor + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' + grep NULL + run_mysql 'SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' '-h some-name-pxc -uroot -p'\''test-password'\''' + local 'command=SELECT User_attributes FROM mysql.user WHERE user='\''monitor'\''' + local 'uri=-h some-name-pxc -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jnuJTOBA27 +++ mktemp ++ local LAST_ERR=/tmp/tmp.3vnwpNe8G8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods 
--selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jnuJTOBA27 ++ cat /tmp/tmp.3vnwpNe8G8 ++ rm /tmp/tmp.jnuJTOBA27 /tmp/tmp.3vnwpNe8G8 ++ return 0 + client_pod=pxc-client-6644d8898f-46xmv + wait_pod pxc-client-6644d8898f-46xmv + local pod=pxc-client-6644d8898f-46xmv + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-46xmv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-46xmv condition met pxc-client-6644d8898f-46xmv.Ok + set +o xtrace NULL + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ABCSOqytD0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.VoWlBoFQ4O ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ABCSOqytD0 ++ cat /tmp/tmp.VoWlBoFQ4O ++ rm /tmp/tmp.ABCSOqytD0 /tmp/tmp.VoWlBoFQ4O ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MQKKB3FXx5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.3HFcjWhn5z ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MQKKB3FXx5 ++ cat /tmp/tmp.3HFcjWhn5z ++ rm /tmp/tmp.MQKKB3FXx5 /tmp/tmp.3HFcjWhn5z ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.fA8W3Rd5wt ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.vA5zqnt2b7 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.fA8W3Rd5wt +++++ cat /tmp/tmp.vA5zqnt2b7 +++++ rm /tmp/tmp.fA8W3Rd5wt /tmp/tmp.vA5zqnt2b7 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.PBLIvedXcN ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.S27JqsmAk5 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.PBLIvedXcN +++++ cat /tmp/tmp.S27JqsmAk5 +++++ rm /tmp/tmp.PBLIvedXcN /tmp/tmp.S27JqsmAk5 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sNntYDKKL0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.qFJJbxpzUb ++ local exit_status=0 +++ seq 
0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sNntYDKKL0 ++ cat /tmp/tmp.qFJJbxpzUb ++ rm /tmp/tmp.sNntYDKKL0 /tmp/tmp.qFJJbxpzUb ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qY4bjvg9JH +++ mktemp ++ local LAST_ERR=/tmp/tmp.cd5R0yu6y4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qY4bjvg9JH ++ cat /tmp/tmp.cd5R0yu6y4 ++ rm /tmp/tmp.qY4bjvg9JH /tmp/tmp.cd5R0yu6y4 ++ return 0 + client_pod=pxc-client-6644d8898f-46xmv + wait_pod pxc-client-6644d8898f-46xmv + local pod=pxc-client-6644d8898f-46xmv + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-46xmv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-46xmv condition met pxc-client-6644d8898f-46xmv.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.ocCIpgbhE8/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.ocCIpgbhE8/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.pAvXZ74L5Y ++ mktemp + local LAST_ERR=/tmp/tmp.SqymW9MxAn + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.pAvXZ74L5Y secret/my-cluster-secrets patched + cat /tmp/tmp.SqymW9MxAn + rm /tmp/tmp.pAvXZ74L5Y /tmp/tmp.SqymW9MxAn + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lDZTEK5EVZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.SdP24Wkrn1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lDZTEK5EVZ ++ cat /tmp/tmp.SdP24Wkrn1 ++ rm /tmp/tmp.lDZTEK5EVZ /tmp/tmp.SdP24Wkrn1 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pJdYPabW8w +++ mktemp ++ local LAST_ERR=/tmp/tmp.yfOlMEnegk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pJdYPabW8w ++ cat /tmp/tmp.yfOlMEnegk ++ rm /tmp/tmp.pJdYPabW8w /tmp/tmp.yfOlMEnegk ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dxl9NFPXKq +++ mktemp ++ local LAST_ERR=/tmp/tmp.nshpx11hz2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dxl9NFPXKq ++ cat /tmp/tmp.nshpx11hz2 ++ rm /tmp/tmp.dxl9NFPXKq /tmp/tmp.nshpx11hz2 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.MO4klLBxHf ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.TH9P2cuRko +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ 
exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.MO4klLBxHf +++++ cat /tmp/tmp.TH9P2cuRko +++++ rm /tmp/tmp.MO4klLBxHf /tmp/tmp.TH9P2cuRko +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.cShq8bkhhx ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.s2qMWMJQZw +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.cShq8bkhhx +++++ cat /tmp/tmp.s2qMWMJQZw +++++ rm /tmp/tmp.cShq8bkhhx /tmp/tmp.s2qMWMJQZw +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2xCFDY1FvF +++ mktemp ++ local LAST_ERR=/tmp/tmp.5z5FjIkmwi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2xCFDY1FvF ++ cat /tmp/tmp.5z5FjIkmwi ++ rm /tmp/tmp.2xCFDY1FvF /tmp/tmp.5z5FjIkmwi ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QYNsQ2eLBF +++ mktemp ++ local LAST_ERR=/tmp/tmp.hFerZPoHdP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QYNsQ2eLBF ++ cat /tmp/tmp.hFerZPoHdP ++ rm /tmp/tmp.QYNsQ2eLBF /tmp/tmp.hFerZPoHdP ++ return 0 + client_pod=pxc-client-6644d8898f-46xmv + wait_pod pxc-client-6644d8898f-46xmv + local pod=pxc-client-6644d8898f-46xmv + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-46xmv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-46xmv condition met pxc-client-6644d8898f-46xmv.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.ocCIpgbhE8/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.ocCIpgbhE8/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.XvK0GReqoP ++ mktemp + local LAST_ERR=/tmp/tmp.dORrAh6Kuq + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.XvK0GReqoP perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.dORrAh6Kuq + rm /tmp/tmp.XvK0GReqoP /tmp/tmp.dORrAh6Kuq + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NM2Nv7kkB6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.CvjXhQ3vfj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NM2Nv7kkB6 ++ cat /tmp/tmp.CvjXhQ3vfj ++ rm /tmp/tmp.NM2Nv7kkB6 /tmp/tmp.CvjXhQ3vfj ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LFZKqS6W4e +++ mktemp ++ local LAST_ERR=/tmp/tmp.HfwxKifZrB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LFZKqS6W4e ++ cat /tmp/tmp.HfwxKifZrB ++ rm /tmp/tmp.LFZKqS6W4e /tmp/tmp.HfwxKifZrB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.l2yOiwIGKi +++ mktemp ++ local LAST_ERR=/tmp/tmp.7T0LFB6SJB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.l2yOiwIGKi ++ cat /tmp/tmp.7T0LFB6SJB ++ rm /tmp/tmp.l2yOiwIGKi /tmp/tmp.7T0LFB6SJB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9ftUTpZX2h +++ mktemp ++ local LAST_ERR=/tmp/tmp.ytx9hL45rm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ 
break ++ cat /tmp/tmp.9ftUTpZX2h ++ cat /tmp/tmp.ytx9hL45rm ++ rm /tmp/tmp.9ftUTpZX2h /tmp/tmp.ytx9hL45rm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wpItfVyE1L +++ mktemp ++ local LAST_ERR=/tmp/tmp.5gI77u0xBm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wpItfVyE1L ++ cat /tmp/tmp.5gI77u0xBm ++ rm /tmp/tmp.wpItfVyE1L /tmp/tmp.5gI77u0xBm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RuOVAJfVne +++ mktemp ++ local LAST_ERR=/tmp/tmp.azYmMSZ4Nn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RuOVAJfVne ++ cat /tmp/tmp.azYmMSZ4Nn ++ rm /tmp/tmp.RuOVAJfVne /tmp/tmp.azYmMSZ4Nn ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.h8tGOh5I3N +++ mktemp ++ local LAST_ERR=/tmp/tmp.qEQCeP9DOC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.h8tGOh5I3N ++ cat /tmp/tmp.qEQCeP9DOC ++ rm /tmp/tmp.h8tGOh5I3N /tmp/tmp.qEQCeP9DOC ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NKGCCzFZ4c +++ mktemp ++ local LAST_ERR=/tmp/tmp.irPPUedC5s ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NKGCCzFZ4c ++ cat /tmp/tmp.irPPUedC5s ++ rm /tmp/tmp.NKGCCzFZ4c /tmp/tmp.irPPUedC5s ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.6tvxrpwBkF ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.wkLyGQ14iT +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.6tvxrpwBkF +++++ cat /tmp/tmp.wkLyGQ14iT +++++ rm /tmp/tmp.6tvxrpwBkF /tmp/tmp.wkLyGQ14iT +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.OB1o0ncoYs ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.xbtAfY4oHU +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.OB1o0ncoYs +++++ 
cat /tmp/tmp.xbtAfY4oHU +++++ rm /tmp/tmp.OB1o0ncoYs /tmp/tmp.xbtAfY4oHU +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.x96SA27AcQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.HlZwBC0kbb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.x96SA27AcQ ++ cat /tmp/tmp.HlZwBC0kbb ++ rm /tmp/tmp.x96SA27AcQ /tmp/tmp.HlZwBC0kbb ++ return 0 + [[ 2 == \2 ]] + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.i5dFNontCh ++ mktemp + local LAST_ERR=/tmp/tmp.HhNLjsT5cZ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.i5dFNontCh secret/my-cluster-secrets-2 patched + cat /tmp/tmp.HhNLjsT5cZ + rm /tmp/tmp.i5dFNontCh /tmp/tmp.HhNLjsT5cZ + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QXAtn5ctCD +++ mktemp ++ local LAST_ERR=/tmp/tmp.byQxwS8WAl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QXAtn5ctCD ++ cat /tmp/tmp.byQxwS8WAl ++ rm /tmp/tmp.QXAtn5ctCD /tmp/tmp.byQxwS8WAl ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8d02oY7P0e +++ mktemp ++ local LAST_ERR=/tmp/tmp.nil93mlcN3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8d02oY7P0e ++ cat /tmp/tmp.nil93mlcN3 ++ rm /tmp/tmp.8d02oY7P0e /tmp/tmp.nil93mlcN3 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3Q80XQ75gy +++ mktemp ++ local LAST_ERR=/tmp/tmp.j4uYbE0QGF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' 
++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3Q80XQ75gy ++ cat /tmp/tmp.j4uYbE0QGF ++ rm /tmp/tmp.3Q80XQ75gy /tmp/tmp.j4uYbE0QGF ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.B7ZK4NfSzF ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.7xN84C1txN +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.B7ZK4NfSzF +++++ cat /tmp/tmp.7xN84C1txN +++++ rm /tmp/tmp.B7ZK4NfSzF /tmp/tmp.7xN84C1txN +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.mQSLoWYSXY ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.HhgvqzZg4M +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.mQSLoWYSXY +++++ cat /tmp/tmp.HhgvqzZg4M +++++ rm /tmp/tmp.mQSLoWYSXY /tmp/tmp.HhgvqzZg4M +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1wSidzl7CU +++ mktemp ++ local LAST_ERR=/tmp/tmp.CCwGjGdzif ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1wSidzl7CU ++ cat /tmp/tmp.CCwGjGdzif ++ rm /tmp/tmp.1wSidzl7CU /tmp/tmp.CCwGjGdzif ++ return 0 + [[ 2 == \2 ]] + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MhtD2SiQXZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.wwMOpNxVEe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MhtD2SiQXZ ++ cat /tmp/tmp.wwMOpNxVEe ++ rm /tmp/tmp.MhtD2SiQXZ /tmp/tmp.wwMOpNxVEe ++ return 0 + client_pod=pxc-client-6644d8898f-46xmv + wait_pod pxc-client-6644d8898f-46xmv + local pod=pxc-client-6644d8898f-46xmv + local max_retry=480 + local ns= ++ echo 
pxc-client-6644d8898f-46xmv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-46xmv condition met pxc-client-6644d8898f-46xmv.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.ocCIpgbhE8/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.ocCIpgbhE8/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.m7LC2zMcGE +++ mktemp ++ local LAST_ERR=/tmp/tmp.wqJzZiKA7x ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.m7LC2zMcGE ++ cat /tmp/tmp.wqJzZiKA7x ++ rm /tmp/tmp.m7LC2zMcGE /tmp/tmp.wqJzZiKA7x ++ return 0 + newpass='c@+$,fJjwqK7KR$,2' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''c@+$,fJjwqK7KR$,2'\'';' '-h some-name-pxc -uroot -p'\''c@+$,fJjwqK7KR$,2'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''c@+$,fJjwqK7KR$,2'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''c@+$,fJjwqK7KR$,2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OZshKYwQDc +++ mktemp ++ local LAST_ERR=/tmp/tmp.FqsjcJ2Ars ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OZshKYwQDc ++ cat /tmp/tmp.FqsjcJ2Ars ++ rm /tmp/tmp.OZshKYwQDc /tmp/tmp.FqsjcJ2Ars ++ return 0 + client_pod=pxc-client-6644d8898f-46xmv + wait_pod pxc-client-6644d8898f-46xmv + local pod=pxc-client-6644d8898f-46xmv + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-46xmv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-46xmv condition met pxc-client-6644d8898f-46xmv.Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''c@+$,fJjwqK7KR$,2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''c@+$,fJjwqK7KR$,2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''c@+$,fJjwqK7KR$,2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''c@+$,fJjwqK7KR$,2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3QCO1UDX5u +++ mktemp 
++ local LAST_ERR=/tmp/tmp.QoflxHKnFb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3QCO1UDX5u ++ cat /tmp/tmp.QoflxHKnFb ++ rm /tmp/tmp.3QCO1UDX5u /tmp/tmp.QoflxHKnFb ++ return 0 + client_pod=pxc-client-6644d8898f-46xmv + wait_pod pxc-client-6644d8898f-46xmv + local pod=pxc-client-6644d8898f-46xmv + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-46xmv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-46xmv condition met pxc-client-6644d8898f-46xmv.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.ocCIpgbhE8/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.ocCIpgbhE8/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.E0PZq7Wije +++ mktemp ++ local LAST_ERR=/tmp/tmp.tQQypww87O ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.E0PZq7Wije ++ cat /tmp/tmp.tQQypww87O ++ rm /tmp/tmp.E0PZq7Wije /tmp/tmp.tQQypww87O ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.ej8PNUYLVh ++ mktemp + local LAST_ERR=/tmp/tmp.iVjg9B3wX2 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ej8PNUYLVh secret/my-cluster-secrets-2 configured + cat /tmp/tmp.iVjg9B3wX2 Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
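The getSecretData calls traced above reduce to a kubectl Go-template fetch piped through base64 --decode. A minimal sketch of that helper, with the kubectl_bin retry/tempfile wrapper omitted for brevity (names follow the trace; error handling is simplified):

    getSecretData() {
        # Print the decoded value of one key from a Kubernetes Secret.
        local secretName="$1"
        local dataKey="$2"
        kubectl get "secrets/${secretName}" \
            --template="{{.data.${dataKey}}}" | base64 --decode
    }

    # Usage, as in the trace above:
    #   newpass=$(getSecretData my-cluster-secrets-2 root)
    #   pass=$(getSecretData internal-some-name operator)
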
+ rm /tmp/tmp.ej8PNUYLVh /tmp/tmp.iVjg9B3wX2 + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-4-80.sql ']' + expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-4-80.sql + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ju22pxr6sz +++ mktemp ++ local LAST_ERR=/tmp/tmp.Twip54rwC8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Ju22pxr6sz ++ cat /tmp/tmp.Twip54rwC8 ++ rm /tmp/tmp.Ju22pxr6sz /tmp/tmp.Twip54rwC8 ++ return 0 + client_pod=pxc-client-6644d8898f-46xmv + wait_pod pxc-client-6644d8898f-46xmv + local pod=pxc-client-6644d8898f-46xmv + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-46xmv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-46xmv condition met pxc-client-6644d8898f-46xmv.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.ocCIpgbhE8/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-4-80.sql /tmp/tmp.ocCIpgbhE8/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/conf/some-name.yml + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + local LAST_OUT=/tmp/tmp.WMmTYD9Pjr + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc8.0-backup#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1743-51af0517#' ++ mktemp + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + local LAST_ERR=/tmp/tmp.vp1rlYcZpI + local exit_status=0 + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-23355~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.WMmTYD9Pjr perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.vp1rlYcZpI + rm /tmp/tmp.WMmTYD9Pjr /tmp/tmp.vp1rlYcZpI + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JZxGgpQz3f +++ mktemp ++ local LAST_ERR=/tmp/tmp.p3AP9pzyC9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JZxGgpQz3f ++ cat /tmp/tmp.p3AP9pzyC9 ++ rm /tmp/tmp.JZxGgpQz3f /tmp/tmp.p3AP9pzyC9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NTj8CJ94F2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.oPbaRvL2wY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NTj8CJ94F2 ++ cat /tmp/tmp.oPbaRvL2wY ++ rm /tmp/tmp.NTj8CJ94F2 /tmp/tmp.oPbaRvL2wY ++ return 0 + [[ 
initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Jt3ikSGoes +++ mktemp ++ local LAST_ERR=/tmp/tmp.hoeYP1difF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Jt3ikSGoes ++ cat /tmp/tmp.hoeYP1difF ++ rm /tmp/tmp.Jt3ikSGoes /tmp/tmp.hoeYP1difF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cFP7o1D8QA +++ mktemp ++ local LAST_ERR=/tmp/tmp.iptaBuyO1u ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cFP7o1D8QA ++ cat /tmp/tmp.iptaBuyO1u ++ rm /tmp/tmp.cFP7o1D8QA /tmp/tmp.iptaBuyO1u ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Eu1dacrAum +++ mktemp ++ local LAST_ERR=/tmp/tmp.5bfIy4UHgI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Eu1dacrAum ++ cat /tmp/tmp.5bfIy4UHgI ++ rm /tmp/tmp.Eu1dacrAum /tmp/tmp.5bfIy4UHgI ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5dgLuAlELq +++ mktemp ++ local LAST_ERR=/tmp/tmp.dQaOd3MmFK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5dgLuAlELq ++ cat /tmp/tmp.dQaOd3MmFK ++ rm /tmp/tmp.5dgLuAlELq /tmp/tmp.dQaOd3MmFK ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9DErdbNpEb +++ mktemp ++ local LAST_ERR=/tmp/tmp.wED3jv5yh3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9DErdbNpEb ++ cat /tmp/tmp.wED3jv5yh3 ++ rm /tmp/tmp.9DErdbNpEb /tmp/tmp.wED3jv5yh3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ozBW9Htpw6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.5NRKt1Nrnx ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ozBW9Htpw6 ++ cat /tmp/tmp.5NRKt1Nrnx ++ rm 
/tmp/tmp.ozBW9Htpw6 /tmp/tmp.5NRKt1Nrnx ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 7 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UPFMgSiZFa +++ mktemp ++ local LAST_ERR=/tmp/tmp.rTj2su48jo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UPFMgSiZFa ++ cat /tmp/tmp.rTj2su48jo ++ rm /tmp/tmp.UPFMgSiZFa /tmp/tmp.rTj2su48jo ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 8 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IYYnyBsK2k +++ mktemp ++ local LAST_ERR=/tmp/tmp.dQoBOoFQZa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IYYnyBsK2k ++ cat /tmp/tmp.dQoBOoFQZa ++ rm /tmp/tmp.IYYnyBsK2k /tmp/tmp.dQoBOoFQZa ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 9 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pM5YjcKngW +++ mktemp ++ local LAST_ERR=/tmp/tmp.U5BouCSZVk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pM5YjcKngW ++ cat /tmp/tmp.U5BouCSZVk ++ rm /tmp/tmp.pM5YjcKngW /tmp/tmp.U5BouCSZVk ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 10 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YA8SyTfttX +++ mktemp ++ local LAST_ERR=/tmp/tmp.OoKQsnVFSa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YA8SyTfttX ++ cat /tmp/tmp.OoKQsnVFSa ++ rm /tmp/tmp.YA8SyTfttX /tmp/tmp.OoKQsnVFSa ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 11 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BHyQi6reyu +++ mktemp ++ local LAST_ERR=/tmp/tmp.zGP1hGt4Gz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BHyQi6reyu ++ cat /tmp/tmp.zGP1hGt4Gz ++ rm /tmp/tmp.BHyQi6reyu /tmp/tmp.zGP1hGt4Gz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 12 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UqN9hkiKjZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.pFy57oLAoM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ 
cat /tmp/tmp.UqN9hkiKjZ ++ cat /tmp/tmp.pFy57oLAoM ++ rm /tmp/tmp.UqN9hkiKjZ /tmp/tmp.pFy57oLAoM ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jUocXuWVKg +++ mktemp ++ local LAST_ERR=/tmp/tmp.HJjNqf8HOH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jUocXuWVKg ++ cat /tmp/tmp.HJjNqf8HOH ++ rm /tmp/tmp.jUocXuWVKg /tmp/tmp.HJjNqf8HOH ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.qkRJ7eWJUa ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.747NR4RA3S +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.qkRJ7eWJUa +++++ cat /tmp/tmp.747NR4RA3S +++++ rm /tmp/tmp.qkRJ7eWJUa /tmp/tmp.747NR4RA3S +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9P6NldbUwl +++ mktemp ++ local LAST_ERR=/tmp/tmp.yJhxDJXNrF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9P6NldbUwl ++ cat /tmp/tmp.yJhxDJXNrF ++ rm /tmp/tmp.9P6NldbUwl /tmp/tmp.yJhxDJXNrF ++ return 0 + [[ 3 == \3 ]] + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + check_generation 1 haproxy some-name + local generation=1 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NMykzO1YEN +++ mktemp ++ local LAST_ERR=/tmp/tmp.YXkXKbNQpe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NMykzO1YEN ++ cat /tmp/tmp.YXkXKbNQpe ++ rm /tmp/tmp.NMykzO1YEN /tmp/tmp.YXkXKbNQpe ++ return 0 + current_generation=1 + [[ 1 != \1 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.eYspgv6ziF ++ mktemp + local LAST_ERR=/tmp/tmp.IqijZDVzTz + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.eYspgv6ziF secret/my-cluster-secrets patched + cat /tmp/tmp.IqijZDVzTz + rm /tmp/tmp.eYspgv6ziF /tmp/tmp.IqijZDVzTz + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace 
----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LmfF7QGWyD +++ mktemp ++ local LAST_ERR=/tmp/tmp.tQkEcxsTSy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LmfF7QGWyD ++ cat /tmp/tmp.tQkEcxsTSy ++ rm /tmp/tmp.LmfF7QGWyD /tmp/tmp.tQkEcxsTSy ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cV1RubMWak +++ mktemp ++ local LAST_ERR=/tmp/tmp.11JNb3TdGZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cV1RubMWak ++ cat /tmp/tmp.11JNb3TdGZ ++ rm /tmp/tmp.cV1RubMWak /tmp/tmp.11JNb3TdGZ ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.k9Xphq1Rgj ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ur0MA5L87b +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.k9Xphq1Rgj +++++ cat /tmp/tmp.ur0MA5L87b +++++ rm /tmp/tmp.k9Xphq1Rgj /tmp/tmp.ur0MA5L87b +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WIvPztyIN7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.99VIivtpbq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WIvPztyIN7 ++ cat /tmp/tmp.99VIivtpbq ++ rm /tmp/tmp.WIvPztyIN7 /tmp/tmp.99VIivtpbq ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 8\.0 ]] + '[' -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-3-80.sql ']' + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PyQG3ADKcf +++ mktemp ++ local LAST_ERR=/tmp/tmp.pntalRfSmI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 
'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PyQG3ADKcf ++ cat /tmp/tmp.pntalRfSmI ++ rm /tmp/tmp.PyQG3ADKcf /tmp/tmp.pntalRfSmI ++ return 0 + client_pod=pxc-client-6644d8898f-46xmv + wait_pod pxc-client-6644d8898f-46xmv + local pod=pxc-client-6644d8898f-46xmv + local max_retry=480 + local ns= ++ echo pxc-client-6644d8898f-46xmv ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-6644d8898f-46xmv condition met pxc-client-6644d8898f-46xmv.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.ocCIpgbhE8/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1743/e2e-tests/users/compare/select-3.sql /tmp/tmp.ocCIpgbhE8/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc8.0 =~ 5\.7 ]] + check_generation 1 haproxy some-name + local generation=1 + local container=haproxy + local cluster=some-name + local current_generation ++ kubectl_bin get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jWRBerdsti +++ mktemp ++ local LAST_ERR=/tmp/tmp.wOrZ8Y8h4X ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get statefulset some-name-haproxy -o 'jsonpath={.metadata.generation}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jWRBerdsti ++ cat /tmp/tmp.wOrZ8Y8h4X ++ rm /tmp/tmp.jWRBerdsti /tmp/tmp.wOrZ8Y8h4X ++ return 0 + current_generation=1 + [[ 1 != \1 ]] + destroy users-23355 + local namespace=users-23355 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + grep -v 'the object has been modified' ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ + sort -u + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + tee /tmp/tmp.ocCIpgbhE8/operator.log + grep -v 'get backup status: Job.batch' +++ grep -c percona-xtradb-cluster-operator +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.LC3RnB7gOr +++ mktemp ++ local LAST_ERR=/tmp/tmp.aMCcsKBtkS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LC3RnB7gOr ++ cat /tmp/tmp.aMCcsKBtkS ++ rm /tmp/tmp.LC3RnB7gOr /tmp/tmp.aMCcsKBtkS ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-675fc7d9f5-z6gxg ++ mktemp + local LAST_OUT=/tmp/tmp.2MsL3cGLx9 ++ mktemp + local LAST_ERR=/tmp/tmp.dO2RSe3pzD + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-675fc7d9f5-z6gxg + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.2MsL3cGLx9 + cat /tmp/tmp.dO2RSe3pzD + rm /tmp/tmp.2MsL3cGLx9 /tmp/tmp.dO2RSe3pzD + return 0 2024-07-10T14:12:21.724Z INFO setup Manager starting 
up {"gitCommit": "51af051703ea68e6367821ec3068826842ab5c9d", "gitBranch": "PR-1743-51af0517", "buildTime": "2024-07-10T11:38:48Z", "goVersion": "go1.22.5", "os": "linux", "arch": "amd64"} 2024-07-10T14:12:21.724Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.26.15-gke.1469001"} 2024-07-10T14:12:21.726Z INFO setup Registering Components. 2024-07-10T14:12:24.698Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2024-07-10T14:12:24.709Z INFO setup Starting the Cmd. 2024-07-10T14:12:24.710Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2024-07-10T14:12:24.710Z INFO controller-runtime.metrics Starting metrics server 2024-07-10T14:12:24.710Z INFO controller-runtime.webhook Starting webhook server 2024-07-10T14:12:24.710Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2024-07-10T14:12:24.711Z INFO controller-runtime.certwatcher Updated current TLS certificate 2024-07-10T14:12:24.791Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2024-07-10T14:12:24.792Z INFO controller-runtime.certwatcher Starting certificate watcher 2024-07-10T14:12:24.811Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 2024-07-10T14:12:24.870Z DEBUG events percona-xtradb-cluster-operator-675fc7d9f5-z6gxg_f66e9c7b-4f05-4d41-96d4-4ce742041316 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"9782b4e3-19d5-4e00-925d-683a2c061dd6","apiVersion":"coordination.k8s.io/v1","resourceVersion":"82764"}, "reason": "LeaderElection"} 2024-07-10T14:12:24.870Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2024-07-10T14:12:24.871Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2024-07-10T14:12:24.871Z INFO Starting Controller {"controller": "pxc-controller"} 2024-07-10T14:12:24.871Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2024-07-10T14:12:24.871Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2024-07-10T14:12:24.871Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"} 2024-07-10T14:12:24.871Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2024-07-10T14:12:25.090Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2024-07-10T14:12:25.090Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2024-07-10T14:12:25.090Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2024-07-10T14:12:54.168Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "fdddcbf0-6ff7-4f4b-8db5-6d8ccea4ea17", "version": "1.15.0"} 2024-07-10T14:12:54.611Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "fdddcbf0-6ff7-4f4b-8db5-6d8ccea4ea17", "object": "some-name-pxc"} 2024-07-10T14:12:54.762Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "fdddcbf0-6ff7-4f4b-8db5-6d8ccea4ea17", "object": "some-name-pxc"} 2024-07-10T14:12:54.897Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": 
"fdddcbf0-6ff7-4f4b-8db5-6d8ccea4ea17", "object": "some-name-proxysql"} 2024-07-10T14:12:54.947Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "fdddcbf0-6ff7-4f4b-8db5-6d8ccea4ea17", "object": "some-name-pxc"} 2024-07-10T14:12:55.055Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "fdddcbf0-6ff7-4f4b-8db5-6d8ccea4ea17", "object": "some-name-pxc-unready"} 2024-07-10T14:12:55.192Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "fdddcbf0-6ff7-4f4b-8db5-6d8ccea4ea17", "object": "some-name-proxysql"} 2024-07-10T14:12:55.284Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "fdddcbf0-6ff7-4f4b-8db5-6d8ccea4ea17", "object": "some-name-proxysql"} 2024-07-10T14:12:55.503Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "fdddcbf0-6ff7-4f4b-8db5-6d8ccea4ea17", "object": "some-name-proxysql-unready"} 2024-07-10T14:14:12.300Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "cc715af8-040e-4d07-8652-23cb0f959d4c", "user": "operator"} 2024-07-10T14:14:12.351Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "cc715af8-040e-4d07-8652-23cb0f959d4c", "user": "monitor"} 2024-07-10T14:14:12.453Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "cc715af8-040e-4d07-8652-23cb0f959d4c"} 2024-07-10T14:14:12.494Z INFO monitor user privileges granted {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "cc715af8-040e-4d07-8652-23cb0f959d4c"} 2024-07-10T14:14:12.542Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "cc715af8-040e-4d07-8652-23cb0f959d4c", "user": "xtrabackup"} 2024-07-10T14:14:12.605Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "cc715af8-040e-4d07-8652-23cb0f959d4c"} 2024-07-10T14:14:12.654Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "cc715af8-040e-4d07-8652-23cb0f959d4c", "user": "replication"} 2024-07-10T14:14:12.694Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "cc715af8-040e-4d07-8652-23cb0f959d4c", "err": "get primary pxc pod: not found"} 2024-07-10T14:14:17.323Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "fe8f3436-6822-4201-865a-19fbaad209ee", "err": "get primary pxc pod: not found"} 2024-07-10T14:14:22.476Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "58baf184-b828-4d81-ae30-c3dcd358e836", "err": "get primary pxc pod: not found"} 2024-07-10T14:14:27.600Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "86521a74-f9c2-4e2c-b3d4-042206690ea1", 
"err": "get primary pxc pod: not found"} 2024-07-10T14:16:34.453Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "c93eb00d-4a74-4873-9a85-1f520207ba75", "user": "root"} 2024-07-10T14:16:34.677Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "c93eb00d-4a74-4873-9a85-1f520207ba75", "new version": "8.0.36-28.1"} 2024-07-10T14:16:37.984Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "c93eb00d-4a74-4873-9a85-1f520207ba75"} 2024-07-10T14:16:42.805Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "cb9cafe4-a354-4dd1-9553-17a1814e5621"} 2024-07-10T14:16:48.118Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "fc828570-8432-4d2c-9bfd-55b708c19142"} 2024-07-10T14:16:53.618Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "27e92ce3-3c62-44a3-9ec8-95f9a1b7028d"} 2024-07-10T14:16:58.899Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "16af9d6e-be3b-4b4c-a753-676822aa6780"} 2024-07-10T14:17:04.207Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "f29c57dc-826d-4164-a418-aba5bd7bce30"} 2024-07-10T14:17:09.574Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "49c2e7b3-b973-43b4-a36f-69f446d6d7d6"} 2024-07-10T14:17:14.893Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "19935505-9347-4c50-b829-38fc39e9446c"} 2024-07-10T14:17:20.214Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "2de63cf3-6eaa-4e36-a2b1-bca1f47d49d9"} 2024-07-10T14:17:25.700Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "81b147c1-2d0a-41f0-ae6c-0652d002673c"} 2024-07-10T14:17:31.586Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "35e98a53-f79b-4cd2-93a4-6a0c21553a14"} 2024-07-10T14:17:36.117Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "5d995c48-86d1-4a84-93fa-075925983d7c"} 2024-07-10T14:17:38.184Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "3cd7c685-5994-470f-89c0-428cdffadc12", "user": "root"} 2024-07-10T14:17:38.228Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "3cd7c685-5994-470f-89c0-428cdffadc12", "user": "root"} 2024-07-10T14:17:38.298Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "3cd7c685-5994-470f-89c0-428cdffadc12", "secret": "some-name-mysql-init", "user": "root"} 
2024-07-10T14:17:43.945Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "3cd7c685-5994-470f-89c0-428cdffadc12"} 2024-07-10T14:17:43.961Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "3cd7c685-5994-470f-89c0-428cdffadc12", "user": "root"} 2024-07-10T14:17:44.016Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "3cd7c685-5994-470f-89c0-428cdffadc12", "user": "root"} 2024-07-10T14:17:47.603Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "3cd7c685-5994-470f-89c0-428cdffadc12"} 2024-07-10T14:17:53.413Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "e17b6d0b-87e1-47e8-bc75-f77e09855df9"} 2024-07-10T14:17:56.294Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "a6ff86b5-c9e8-4f9e-8d72-e8ee2e192759", "object": "some-name-proxysql"} 2024-07-10T14:17:58.608Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "528f667c-acaf-4ba2-af54-d27607a8cda6"} 2024-07-10T14:18:13.790Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "23fbdac6-e390-40ac-995f-aa7cdceaaf05", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.206.194.71:33062: connect: connection refused"} 2024-07-10T14:18:36.450Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "01c827a2-caee-4fbd-80b6-73598daee0c9", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.206.194.71:33062: i/o timeout"} 2024-07-10T14:18:36.616Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "6d1bc27f-8ee9-4c68-8ca1-ab1eb275dc98", "err": "get primary pxc pod: not found"} 2024-07-10T14:18:36.835Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "5085fae7-cc97-4e1f-be00-ab637be69040", "primary name": "some-name-pxc-0.some-name-pxc.users-23355.svc.cluster.local"} 2024-07-10T14:18:41.585Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "05d79ef2-dd68-42ad-bbae-8188dc3a4a7c", "err": "get primary pxc pod: not found"} 2024-07-10T14:18:41.728Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "0d0245fa-2f52-4f1d-9257-9c55571de4c3", "primary name": "some-name-pxc-0.some-name-pxc.users-23355.svc.cluster.local"} 2024-07-10T14:18:46.732Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "cca1dbbf-8815-46be-ae3b-ea8d5596ae45", "err": "get primary pxc pod: not found"} 2024-07-10T14:18:57.098Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "046cc4af-952d-4146-80e6-353738759c1d", "err": "get primary pxc pod: not found"} 2024-07-10T14:19:02.434Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "08b7e118-503b-4c10-807b-7ba7a1a845e6", "err": "get primary pxc pod: not found"} 2024-07-10T14:19:07.417Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "48e8399a-5084-4945-836a-70382b9f085f", "err": "get primary pxc pod: not found"} 2024-07-10T14:19:12.519Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "dc8c6d58-556e-491a-8f98-16a61465439c", "err": "get primary pxc pod: not found"} 2024-07-10T14:19:12.694Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "656dd5e9-6b29-4112-8809-9a5cc238aa2f", "err": "get primary pxc pod: not found"} 2024-07-10T14:19:17.682Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "78e24c98-723e-47ca-9b20-6f10618a4df8", "err": "get primary pxc pod: not found"} 2024-07-10T14:19:22.823Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "144f73db-db2a-4550-8129-fb26875b2f05", "err": "get primary pxc pod: not found"} 2024-07-10T14:19:33.199Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "ced91b34-c0fc-4e13-836a-bbcfcbe96200", "err": "get primary pxc pod: not found"} 2024-07-10T14:19:38.396Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "cf2425d0-12ef-4e40-be2a-cb4e28eebe6d", "err": "get primary pxc pod: not found"} 2024-07-10T14:19:43.500Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "060c1e37-7524-4f5c-9c24-3b97b31134e0", "err": "get primary pxc pod: not found"} 2024-07-10T14:19:43.698Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "b1ff6da8-e282-4c86-aa70-e655823d8a32", "err": "get primary pxc pod: not found"} 2024-07-10T14:19:48.799Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "5ad9336d-4043-4fed-92ff-06d63064ab83", "err": "get primary pxc pod: not found"} 2024-07-10T14:19:53.807Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "6bafd126-1980-4900-b95c-ecbd60f1e991", "err": "get primary pxc pod: not found"} 
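The repeated "reconcile replication error ... get primary pxc pod: not found" entries are transient: they show up while the PXC pods are being restarted during the test (note the adjacent "connection refused" and "no such host" errors), and the test simply waits them out with the wait_cluster_consistency loop traced earlier. A condensed sketch of that loop, assuming the HAProxy topology used in this run and omitting the kubectl_bin retry wrapper (the real helper also handles the ProxySQL case via get_proxy_engine):

    wait_cluster_consistency() {
        # Poll the PXC custom resource until it reports "ready" and the
        # expected numbers of PXC and HAProxy pods are ready.
        local cluster="$1" cluster_size="$2" proxy_size="$3"
        local i=0 max=36
        sleep 7
        while [[ "$(kubectl get pxc "$cluster" -o jsonpath='{.status.state}')" != "ready" ]]; do
            echo 'waiting for cluster readyness'
            sleep 20
            i=$((i + 1))
            if [[ $i -ge $max ]]; then
                echo "cluster $cluster never became ready" >&2
                return 1
            fi
        done
        [[ "$(kubectl get pxc "$cluster" -o jsonpath='{.status.pxc.ready}')" == "$cluster_size" ]] &&
            [[ "$(kubectl get pxc "$cluster" -o jsonpath='{.status.haproxy.ready}')" == "$proxy_size" ]]
    }

    # Usage, as in the trace: wait_cluster_consistency some-name 3 3
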
2024-07-10T14:20:04.144Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "883d5ad4-a135-494d-a1f2-d52273d59984", "err": "get primary pxc pod: not found"} 2024-07-10T14:20:09.247Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "a6a9f24e-0284-4cfd-aeda-28f1af763278", "err": "get primary pxc pod: not found"} 2024-07-10T14:20:14.748Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "93444a6a-a8ee-443f-8b67-b49a82f74b47", "err": "get primary pxc pod: not found"} 2024-07-10T14:20:15.520Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8fb7201a-4e83-4519-a8b2-975a9554a84b", "err": "get primary pxc pod: not found"} 2024-07-10T14:20:20.473Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "637b92f4-9586-476a-88cd-60a220d6a783", "err": "get primary pxc pod: not found"} 2024-07-10T14:20:36.175Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "c1d9fd83-0b2f-44e2-b9c9-1070e6585468", "err": "get primary pxc pod: not found"} 2024-07-10T14:20:46.421Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "e8cb4e59-1267-426b-9603-6aaae098439c", "err": "get primary pxc pod: not found"} 2024-07-10T14:20:51.569Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "69a0937d-1aa2-490f-917c-e403f76ed1d1", "err": "get primary pxc pod: not found"} 2024-07-10T14:21:07.091Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "e8eef8bb-8f73-401e-9bb8-b56c71f89d0c", "err": "get primary pxc pod: not found"} 2024-07-10T14:21:12.309Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "937fbc0b-4381-4906-953b-05e11fe1205b", "err": "get primary pxc pod: not found"} 2024-07-10T14:21:12.668Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "2e464fd4-6140-4bc2-beaa-7cb966f63042", "err": "get primary pxc pod: not found"} 2024-07-10T14:21:17.160Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "2e464fd4-6140-4bc2-beaa-7cb966f63042", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in 
ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:922\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1225\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-10T14:21:21.467Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "cbba9e7d-bb4a-4e6f-b750-07bab5bec572", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:922\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1225\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-10T14:21:26.862Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "1acf38dd-ece8-4df4-81e5-61b2bb0aaabc", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:922\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1225\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-10T14:21:28.283Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "36682769-c545-47a7-977c-4c4b20df6e46", "user": "proxyadmin"} 2024-07-10T14:21:28.283Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "36682769-c545-47a7-977c-4c4b20df6e46", "user": "proxyadmin"} 2024-07-10T14:21:28.352Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "36682769-c545-47a7-977c-4c4b20df6e46", "user": "proxyadmin"} 2024-07-10T14:21:28.363Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "36682769-c545-47a7-977c-4c4b20df6e46", "user": "proxyadmin"} 2024-07-10T14:21:28.363Z INFO Proxy pods will be restarted {"controller": 
"pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "36682769-c545-47a7-977c-4c4b20df6e46", "last-applied-secret": "1a55867d0f4fa0d0643653b2eb57b2f690315d8d843a10599c9839c2a779a95f"} 2024-07-10T14:21:28.399Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "36682769-c545-47a7-977c-4c4b20df6e46", "object": "some-name-proxysql"} 2024-07-10T14:21:28.626Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "36682769-c545-47a7-977c-4c4b20df6e46", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:922\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1225\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-10T14:22:07.149Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "953eda30-f45c-441d-ba7c-1d4a5f23064d"} 2024-07-10T14:22:15.616Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "61412053-da85-48b5-8e8c-3708e25e435a", "object": "some-name-proxysql"} 2024-07-10T14:22:15.762Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "3dc188f8-337c-454b-9649-0ce9e629d329", "error": "exec syncusers: command terminated with exit code 137 / / ", "errorVerbose": "exec syncusers: command terminated with exit code 137 / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:922\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1225\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-10T14:22:21.373Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "a4f2de26-e865-4e42-8634-1d4faa31b9af", "user": "xtrabackup"} 2024-07-10T14:22:21.400Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "a4f2de26-e865-4e42-8634-1d4faa31b9af", "user": "xtrabackup"} 2024-07-10T14:22:21.413Z INFO 
MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "a4f2de26-e865-4e42-8634-1d4faa31b9af", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-07-10T14:22:21.423Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "a4f2de26-e865-4e42-8634-1d4faa31b9af", "user": "xtrabackup"} 2024-07-10T14:22:21.450Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "a4f2de26-e865-4e42-8634-1d4faa31b9af", "user": "xtrabackup"} 2024-07-10T14:22:21.462Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "a4f2de26-e865-4e42-8634-1d4faa31b9af", "last-applied-secret": "d11bcc70ca53ea410cd455e13d82e7225917b0861d9b677ed34c02c5f6fbaf1b"} 2024-07-10T14:22:21.519Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "a4f2de26-e865-4e42-8634-1d4faa31b9af", "object": "some-name-pxc"} 2024-07-10T14:22:26.876Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "a4f2de26-e865-4e42-8634-1d4faa31b9af"} 2024-07-10T14:23:23.196Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "c7272579-bfa7-47cb-a92b-f0b7ddaed388", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp 10.206.192.32:33062: connect: connection refused"} 2024-07-10T14:24:21.253Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "0b61db22-9d03-49e9-b53f-ee1a53c2fa30", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-23355 on 10.79.64.10:53: no such host"} 2024-07-10T14:24:57.521Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "51ca3f5b-4baa-499b-81f9-807d9533b33a", "primary name": "some-name-pxc-0.some-name-pxc.users-23355.svc.cluster.local"} 2024-07-10T14:25:06.440Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "2aba2c85-5460-42a3-aa06-7f928fb613d5"} 2024-07-10T14:25:11.422Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "28a14d96-b53d-4a89-a850-1c35c23cf1ed"} 2024-07-10T14:25:16.919Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "063c0a80-a871-4384-a3a2-9b1b2b2e522f"} 2024-07-10T14:25:22.117Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "e6759aa5-d858-4875-9a7a-ca7033bd0e09"} 2024-07-10T14:25:27.427Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8fd1daba-2b9f-46d5-9252-a0006ce5a844"} 2024-07-10T14:25:29.800Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "b42bf8f3-70a0-4508-86ea-41f4cb9443d0", "user": "monitor"} 2024-07-10T14:25:29.829Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "b42bf8f3-70a0-4508-86ea-41f4cb9443d0", "user": "monitor"} 2024-07-10T14:25:29.931Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "b42bf8f3-70a0-4508-86ea-41f4cb9443d0", "secret": "some-name-mysql-init", "user": "monitor"} 2024-07-10T14:25:29.978Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "b42bf8f3-70a0-4508-86ea-41f4cb9443d0", "user": "monitor"} 2024-07-10T14:25:30.053Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "b42bf8f3-70a0-4508-86ea-41f4cb9443d0", "user": "monitor"} 2024-07-10T14:25:30.389Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "b42bf8f3-70a0-4508-86ea-41f4cb9443d0", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"} 2024-07-10T14:25:30.440Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "b42bf8f3-70a0-4508-86ea-41f4cb9443d0", "object": "some-name-proxysql"} 2024-07-10T14:25:33.076Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "b42bf8f3-70a0-4508-86ea-41f4cb9443d0", "error": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / 
\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:922\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1225\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-10T14:26:18.679Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "148cfc59-c2d4-452a-a8c9-f36ece7fa929", "user": "monitor"} 2024-07-10T14:26:18.940Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "148cfc59-c2d4-452a-a8c9-f36ece7fa929", "user": "monitor"} 2024-07-10T14:26:18.959Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "148cfc59-c2d4-452a-a8c9-f36ece7fa929", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"} 2024-07-10T14:26:23.440Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "148cfc59-c2d4-452a-a8c9-f36ece7fa929"} 2024-07-10T14:26:27.034Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "2c6b8210-de78-4c2a-8637-6cb3f4cb9f74"} 2024-07-10T14:26:32.332Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "1a966a56-e32e-4989-9216-c8efc1deae62"} 2024-07-10T14:26:38.125Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "4472ee60-bea9-492d-84df-17306d9ff8ca"} 2024-07-10T14:26:43.442Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "d34c38bf-2511-4080-9ef7-51215fccdc1d"} 2024-07-10T14:26:45.399Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "d708e63a-5f9d-4024-ad34-b332d241a812", "user": "operator"} 2024-07-10T14:26:45.431Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "d708e63a-5f9d-4024-ad34-b332d241a812", "user": "operator"} 2024-07-10T14:26:45.449Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "d708e63a-5f9d-4024-ad34-b332d241a812", "secret": "some-name-mysql-init", "user": "operator"} 2024-07-10T14:26:45.465Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "d708e63a-5f9d-4024-ad34-b332d241a812", "user": "operator"} 2024-07-10T14:26:45.497Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "d708e63a-5f9d-4024-ad34-b332d241a812", "user": "operator"} 2024-07-10T14:26:45.531Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "d708e63a-5f9d-4024-ad34-b332d241a812", 
"last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-07-10T14:26:45.579Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "d708e63a-5f9d-4024-ad34-b332d241a812", "object": "some-name-proxysql"} 2024-07-10T14:26:46.825Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "d708e63a-5f9d-4024-ad34-b332d241a812", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-23355.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-23355.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-23355.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-23355.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-23355.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-23355.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:922\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1225\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-10T14:27:29.726Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "ad19c6db-a127-47ef-a71a-79db5ceb7dcf"} 2024-07-10T14:27:39.202Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "4f91fab1-069c-4e4b-8331-d317e3c2d419"} 2024-07-10T14:27:44.332Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "secrets": "my-cluster-secrets-2"} 2024-07-10T14:27:44.342Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "user": "root"} 2024-07-10T14:27:44.389Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "user": "root"} 2024-07-10T14:27:44.408Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "secret": "some-name-mysql-init", "user": "root"} 2024-07-10T14:27:44.646Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": 
"some-name", "reconcileID": "89d3055d-0303-416d-ab50-7b1b01ae6efa"} 2024-07-10T14:27:48.812Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab"} 2024-07-10T14:27:48.827Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "user": "root"} 2024-07-10T14:27:48.877Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "user": "root"} 2024-07-10T14:27:48.892Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "user": "operator"} 2024-07-10T14:27:48.923Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "user": "operator"} 2024-07-10T14:27:48.934Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "secret": "some-name-mysql-init", "user": "operator"} 2024-07-10T14:27:48.947Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "user": "operator"} 2024-07-10T14:27:48.976Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "user": "operator"} 2024-07-10T14:27:48.987Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "user": "monitor"} 2024-07-10T14:27:49.017Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "user": "monitor"} 2024-07-10T14:27:49.029Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "secret": "some-name-mysql-init", "user": "monitor"} 2024-07-10T14:27:49.074Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "user": "monitor"} 2024-07-10T14:27:49.089Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "user": "monitor"} 2024-07-10T14:27:49.193Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "user": "xtrabackup"} 2024-07-10T14:27:49.224Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "user": "xtrabackup"} 2024-07-10T14:27:49.234Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "secret": "some-name-mysql-init", "user": "xtrabackup"} 
2024-07-10T14:27:49.250Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "user": "xtrabackup"} 2024-07-10T14:27:49.280Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "user": "xtrabackup"} 2024-07-10T14:27:49.293Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "user": "replication"} 2024-07-10T14:27:49.324Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "user": "replication"} 2024-07-10T14:27:49.334Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "secret": "some-name-mysql-init", "user": "replication"} 2024-07-10T14:27:49.347Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "user": "replication"} 2024-07-10T14:27:49.374Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "user": "replication"} 2024-07-10T14:27:49.374Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "user": "proxyadmin"} 2024-07-10T14:27:49.418Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "user": "proxyadmin"} 2024-07-10T14:27:49.434Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "user": "proxyadmin"} 2024-07-10T14:27:49.434Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "last-applied-secret": "2b5c9109326f6f68f75789721f030a7e76e58f0213bc365fe5b85b850817f094"} 2024-07-10T14:27:49.434Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "last-applied-secret": "2b5c9109326f6f68f75789721f030a7e76e58f0213bc365fe5b85b850817f094"} 2024-07-10T14:27:49.468Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "object": "some-name-pxc"} 2024-07-10T14:27:49.515Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "object": "some-name-proxysql"} 2024-07-10T14:27:49.684Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "8b32c79d-af77-49d8-a82e-fa16b5ea1cab", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL 
connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:922\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1225\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-10T14:27:50.028Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "33323713-9cd6-4d04-a7e2-4030fae86a2d", "user": "monitor"} 2024-07-10T14:27:50.355Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "33323713-9cd6-4d04-a7e2-4030fae86a2d", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:922\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1225\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-10T14:28:46.905Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "218b0d93-2178-4bf0-92f4-db780c62e065", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-23355 on 10.79.64.10:53: no such host"} 2024-07-10T14:28:52.348Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "abaee561-25a6-4ea1-8422-5371bc4c159c", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-23355 on 10.79.64.10:53: no such host"} 2024-07-10T14:29:28.647Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "16b5c786-7f5e-4d44-9001-651372c9ae16", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-23355 on 10.79.64.10:53: no such host"} 2024-07-10T14:29:28.902Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "ec90d641-86eb-44ab-b4e6-9c2dc23cf705", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-23355 on 10.79.64.10:53: no such host"} 2024-07-10T14:29:33.848Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "bd5d2fc8-5550-4ed3-9d7d-86999c2f9b76", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-23355 on 10.79.64.10:53: no such host"} 2024-07-10T14:29:39.025Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "010b039f-5401-49c6-a862-a72afc275d9a", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-23355 on 10.79.64.10:53: no such host"} 2024-07-10T14:29:44.311Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "c751c0b9-2309-49e8-bad0-1281b0aa5201", "primary name": "some-name-pxc-0.some-name-pxc.users-23355.svc.cluster.local"} 2024-07-10T14:29:49.466Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "fa08bdeb-f5ae-41b6-ae6d-9ae21b298701", "primary name": "some-name-pxc-0.some-name-pxc.users-23355.svc.cluster.local"} 2024-07-10T14:29:54.598Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "62400271-5287-4c00-831f-da77371d78dd", "primary name": "some-name-pxc-0.some-name-pxc.users-23355.svc.cluster.local"} 2024-07-10T14:30:00.236Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "f63b672c-f9b5-4e95-802f-ddb3cccd6fff", "primary name": "some-name-pxc-0.some-name-pxc.users-23355.svc.cluster.local"} 2024-07-10T14:30:05.754Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "64e8b614-e7e6-4722-a07a-ecf2851044ca", "primary name": "some-name-pxc-0.some-name-pxc.users-23355.svc.cluster.local"} 2024-07-10T14:30:10.885Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "b0fb6a67-3829-4a8e-8c30-2c160c56f71b", "primary name": "some-name-pxc-0.some-name-pxc.users-23355.svc.cluster.local"} 2024-07-10T14:30:16.312Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "3fd5bbcc-8463-4e36-b9c7-f513defb858c", "user": "monitor"} 2024-07-10T14:30:16.544Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "3fd5bbcc-8463-4e36-b9c7-f513defb858c", "user": "monitor"} 2024-07-10T14:30:16.577Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "3fd5bbcc-8463-4e36-b9c7-f513defb858c", "last-applied-secret": "2b5c9109326f6f68f75789721f030a7e76e58f0213bc365fe5b85b850817f094"} 2024-07-10T14:30:20.298Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "3fd5bbcc-8463-4e36-b9c7-f513defb858c"} 2024-07-10T14:30:24.746Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "64f781dd-9201-43b7-8b61-4e1bd931864c"} 2024-07-10T14:30:30.041Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "1e95bf7e-0d56-418b-9c47-e8e2fadb7331"} 2024-07-10T14:30:31.921Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "41279c6f-b742-417d-917c-ed052d8678b4", "user": "operator"} 2024-07-10T14:30:31.956Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "41279c6f-b742-417d-917c-ed052d8678b4", "user": "operator"} 2024-07-10T14:30:31.966Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "41279c6f-b742-417d-917c-ed052d8678b4", "secret": "some-name-mysql-init", "user": "operator"} 2024-07-10T14:30:31.984Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "41279c6f-b742-417d-917c-ed052d8678b4", "user": "operator"} 2024-07-10T14:30:32.015Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-23355", "name": 
"some-name", "reconcileID": "41279c6f-b742-417d-917c-ed052d8678b4", "user": "operator"} 2024-07-10T14:30:32.050Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "41279c6f-b742-417d-917c-ed052d8678b4", "last-applied-secret": "3477495765c5341398b70403bd4b1c951228eea523056672ff72617c9d7cc02b"} 2024-07-10T14:30:32.083Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "41279c6f-b742-417d-917c-ed052d8678b4", "object": "some-name-proxysql"} 2024-07-10T14:30:33.360Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "41279c6f-b742-417d-917c-ed052d8678b4", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-23355.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-23355.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-23355.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-23355.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-23355.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-23355.svc.' 
(using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:922\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1225\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-10T14:31:15.055Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "a933201f-021d-4138-bc4f-c2b6d0c725be"} 2024-07-10T14:31:18.721Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "23a1a1fc-16c2-479d-b09b-c0d4bc7a8c31"} 2024-07-10T14:31:23.956Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "a54964ab-f889-4358-ac2f-b3a6337c773e"} 2024-07-10T14:31:29.389Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "35150aa7-fc47-4739-a415-12e83eb115fa"} 2024-07-10T14:31:34.623Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "918c1c8a-f5aa-4422-82fc-1a452b40c8c2"} 2024-07-10T14:31:39.925Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "0e4d8976-4170-4801-ab48-3630669f5154"} 2024-07-10T14:31:46.665Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "ef9f4adc-e7e6-421b-85ce-68d33e2ec2a3"} 2024-07-10T14:31:50.548Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "65a65540-f1f2-43f1-b989-0ca9a2805cca"} 2024-07-10T14:31:56.002Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "e8f085d9-203f-4eb5-8486-6cd193d3975e"} 2024-07-10T14:32:01.549Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "0d01f555-9bde-491a-a5ec-7fe2c7f7e232"} 2024-07-10T14:32:06.623Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "f85dc9e7-b7fd-4e47-a269-1d670a601f3b"} 2024-07-10T14:32:11.847Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "1d94b25a-b932-4be4-9b40-d6b80a13a2ce"} 2024-07-10T14:32:17.232Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "2a5d10ec-3638-433e-b45b-2c6975cdda0c"} 2024-07-10T14:32:22.738Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "c06f46f3-8cba-4a69-8be7-8a59270f39cd"} 2024-07-10T14:32:28.134Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": 
"714b78b1-e094-437d-b8f4-32ac521678f9"} 2024-07-10T14:32:30.012Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "ed20d38d-29d9-49b5-a190-0b66d025e2f0", "user": "root"} 2024-07-10T14:32:30.064Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "ed20d38d-29d9-49b5-a190-0b66d025e2f0", "user": "root"} 2024-07-10T14:32:30.098Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "ed20d38d-29d9-49b5-a190-0b66d025e2f0", "secret": "some-name-mysql-init", "user": "root"} 2024-07-10T14:32:35.602Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "ed20d38d-29d9-49b5-a190-0b66d025e2f0"} 2024-07-10T14:32:35.618Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "ed20d38d-29d9-49b5-a190-0b66d025e2f0", "user": "root"} 2024-07-10T14:32:35.667Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "ed20d38d-29d9-49b5-a190-0b66d025e2f0", "user": "root"} 2024-07-10T14:32:35.690Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "ed20d38d-29d9-49b5-a190-0b66d025e2f0", "user": "monitor"} 2024-07-10T14:32:35.720Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "ed20d38d-29d9-49b5-a190-0b66d025e2f0", "user": "monitor"} 2024-07-10T14:32:35.730Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "ed20d38d-29d9-49b5-a190-0b66d025e2f0", "secret": "some-name-mysql-init", "user": "monitor"} 2024-07-10T14:32:35.773Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "ed20d38d-29d9-49b5-a190-0b66d025e2f0", "user": "monitor"} 2024-07-10T14:32:35.787Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "ed20d38d-29d9-49b5-a190-0b66d025e2f0", "user": "monitor"} 2024-07-10T14:32:35.893Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "ed20d38d-29d9-49b5-a190-0b66d025e2f0", "user": "xtrabackup"} 2024-07-10T14:32:35.923Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "ed20d38d-29d9-49b5-a190-0b66d025e2f0", "user": "xtrabackup"} 2024-07-10T14:32:35.938Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "ed20d38d-29d9-49b5-a190-0b66d025e2f0", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-07-10T14:32:35.958Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "ed20d38d-29d9-49b5-a190-0b66d025e2f0", "user": "xtrabackup"} 2024-07-10T14:32:35.990Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "ed20d38d-29d9-49b5-a190-0b66d025e2f0", "user": "xtrabackup"} 2024-07-10T14:32:36.003Z INFO Password changed, updating 
user {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "ed20d38d-29d9-49b5-a190-0b66d025e2f0", "user": "proxyadmin"} 2024-07-10T14:32:36.049Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "ed20d38d-29d9-49b5-a190-0b66d025e2f0", "user": "proxyadmin"} 2024-07-10T14:32:36.077Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "ed20d38d-29d9-49b5-a190-0b66d025e2f0", "user": "proxyadmin"} 2024-07-10T14:32:36.077Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "ed20d38d-29d9-49b5-a190-0b66d025e2f0", "last-applied-secret": "924044bf743788cf413529fee345e8d4f69479294358bc59eaa78dec71170d1e"} 2024-07-10T14:32:36.077Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "ed20d38d-29d9-49b5-a190-0b66d025e2f0", "last-applied-secret": "924044bf743788cf413529fee345e8d4f69479294358bc59eaa78dec71170d1e"} 2024-07-10T14:32:36.116Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "ed20d38d-29d9-49b5-a190-0b66d025e2f0", "object": "some-name-pxc"} 2024-07-10T14:32:36.157Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "ed20d38d-29d9-49b5-a190-0b66d025e2f0", "object": "some-name-proxysql"} 2024-07-10T14:32:36.279Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "ed20d38d-29d9-49b5-a190-0b66d025e2f0", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:922\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1225\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-10T14:32:36.787Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "5b18b85d-eb83-453b-936f-6563ebbfdedd", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-2: dial tcp: lookup some-name-pxc-2.some-name-pxc.users-23355 on 10.79.64.10:53: no such host"} 2024-07-10T14:32:47.343Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "38529715-ba9c-4b86-a096-3cd4f4b87723", "object": "some-name-pxc"} 2024-07-10T14:32:47.472Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "38529715-ba9c-4b86-a096-3cd4f4b87723", "object": "some-name-haproxy"} 2024-07-10T14:32:47.607Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "38529715-ba9c-4b86-a096-3cd4f4b87723", "object": "some-name-haproxy"} 2024-07-10T14:32:47.854Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "38529715-ba9c-4b86-a096-3cd4f4b87723", "object": "some-name-haproxy-replicas"} 2024-07-10T14:32:48.102Z INFO KubeAPIWarningLogger .metadata.ownerReferences contains duplicate entries; API server dedups owner references in 1.20+, and may reject such requests as early as 1.24; please fix your requests; duplicate UID(s) observed: 6e550522-f04f-4f82-801c-706b66bcc5b6 2024-07-10T14:32:48.649Z DEBUG Creating object {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "aca41581-0ee3-4f80-beb2-25f92a87f7cf", "object": "some-name-haproxy"} 2024-07-10T14:33:28.530Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "40a8bdec-61bd-4e4f-92c2-85e30fe5cbac", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-23355 on 10.79.64.10:53: no such host"} 2024-07-10T14:33:33.780Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "5852fefc-b1b3-4b9f-86a5-66f75fbf7184", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-23355 on 10.79.64.10:53: no such host"} 2024-07-10T14:34:52.697Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "6b3be0e0-45d0-4e56-9e6d-8b72dcf1f1b0", "user": "root"} 2024-07-10T14:34:52.743Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "6b3be0e0-45d0-4e56-9e6d-8b72dcf1f1b0", "user": "root"} 
2024-07-10T14:34:52.769Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "6b3be0e0-45d0-4e56-9e6d-8b72dcf1f1b0", "secret": "some-name-mysql-init", "user": "root"} 2024-07-10T14:34:52.800Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "6b3be0e0-45d0-4e56-9e6d-8b72dcf1f1b0", "user": "root"} 2024-07-10T14:34:52.848Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "6b3be0e0-45d0-4e56-9e6d-8b72dcf1f1b0", "user": "root"} 2024-07-10T14:34:52.862Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "6b3be0e0-45d0-4e56-9e6d-8b72dcf1f1b0", "user": "operator"} 2024-07-10T14:34:52.888Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "6b3be0e0-45d0-4e56-9e6d-8b72dcf1f1b0", "user": "operator"} 2024-07-10T14:34:52.904Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "6b3be0e0-45d0-4e56-9e6d-8b72dcf1f1b0", "secret": "some-name-mysql-init", "user": "operator"} 2024-07-10T14:34:52.918Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "6b3be0e0-45d0-4e56-9e6d-8b72dcf1f1b0", "user": "operator"} 2024-07-10T14:34:52.947Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "6b3be0e0-45d0-4e56-9e6d-8b72dcf1f1b0", "user": "operator"} 2024-07-10T14:34:52.961Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "6b3be0e0-45d0-4e56-9e6d-8b72dcf1f1b0", "user": "monitor"} 2024-07-10T14:34:52.993Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "6b3be0e0-45d0-4e56-9e6d-8b72dcf1f1b0", "user": "monitor"} 2024-07-10T14:34:53.004Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "6b3be0e0-45d0-4e56-9e6d-8b72dcf1f1b0", "secret": "some-name-mysql-init", "user": "monitor"} 2024-07-10T14:34:53.016Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "6b3be0e0-45d0-4e56-9e6d-8b72dcf1f1b0", "user": "monitor"} 2024-07-10T14:34:53.133Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "6b3be0e0-45d0-4e56-9e6d-8b72dcf1f1b0", "user": "xtrabackup"} 2024-07-10T14:34:53.162Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "6b3be0e0-45d0-4e56-9e6d-8b72dcf1f1b0", "user": "xtrabackup"} 2024-07-10T14:34:53.174Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "6b3be0e0-45d0-4e56-9e6d-8b72dcf1f1b0", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-07-10T14:34:53.184Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "6b3be0e0-45d0-4e56-9e6d-8b72dcf1f1b0", "user": "xtrabackup"} 2024-07-10T14:34:53.210Z INFO Old 
password discarded {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "6b3be0e0-45d0-4e56-9e6d-8b72dcf1f1b0", "user": "xtrabackup"} 2024-07-10T14:34:53.220Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "6b3be0e0-45d0-4e56-9e6d-8b72dcf1f1b0", "user": "replication"} 2024-07-10T14:34:53.245Z INFO Password updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "6b3be0e0-45d0-4e56-9e6d-8b72dcf1f1b0", "user": "replication"} 2024-07-10T14:34:53.254Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "6b3be0e0-45d0-4e56-9e6d-8b72dcf1f1b0", "secret": "some-name-mysql-init", "user": "replication"} 2024-07-10T14:34:53.270Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "6b3be0e0-45d0-4e56-9e6d-8b72dcf1f1b0", "user": "replication"} 2024-07-10T14:34:53.299Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "6b3be0e0-45d0-4e56-9e6d-8b72dcf1f1b0", "user": "replication"} 2024-07-10T14:34:53.299Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "6b3be0e0-45d0-4e56-9e6d-8b72dcf1f1b0", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-07-10T14:34:53.393Z DEBUG Updating object {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "6b3be0e0-45d0-4e56-9e6d-8b72dcf1f1b0", "object": "some-name-pxc"} 2024-07-10T14:35:44.149Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "5e114884-4785-45ff-9a5e-faa7e6559fff", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-23355 on 10.79.64.10:53: no such host"} 2024-07-10T14:35:49.417Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "ce3d4e81-1e76-46a0-9b77-11caf3369429", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-1: dial tcp: lookup some-name-pxc-1.some-name-pxc.users-23355 on 10.79.64.10:53: no such host"} 2024-07-10T14:36:37.128Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "16d2eec4-57c4-439f-aa08-962ae69883ac", "err": "failed to ensure cluster readonly status: connect to pod some-name-pxc-0: dial tcp 10.206.194.14:33062: connect: connection refused"} 2024-07-10T14:37:18.890Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "6f8cb95a-c69d-4648-9369-e3c150f9faa8", "user": "monitor"} 2024-07-10T14:37:19.362Z INFO Old password discarded {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "6f8cb95a-c69d-4648-9369-e3c150f9faa8", "user": "monitor"} 2024-07-10T14:37:40.734Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "b5426ce6-eb17-41fa-bd27-1c84715975bf", "user": "monitor"} 2024-07-10T14:37:40.760Z INFO Password 
updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "b5426ce6-eb17-41fa-bd27-1c84715975bf", "user": "monitor"} 2024-07-10T14:37:40.772Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "b5426ce6-eb17-41fa-bd27-1c84715975bf", "secret": "some-name-mysql-init", "user": "monitor"} 2024-07-10T14:37:40.781Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "b5426ce6-eb17-41fa-bd27-1c84715975bf", "user": "monitor"} 2024-07-10T14:37:46.191Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "5dec45d9-c199-4a1a-b9c3-a70a05af1c95", "user": "monitor"} 2024-07-10T14:37:51.581Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "fdaa49b1-35f4-4fd8-990b-f34af203ddae", "user": "monitor"} 2024-07-10T14:37:56.974Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "020b5091-e7d8-453e-82db-8a7fcd78ee97", "user": "monitor"} 2024-07-10T14:38:02.445Z INFO Password updated but old one not discarded {"controller": "pxc-controller", "namespace": "users-23355", "name": "some-name", "reconcileID": "59c15ba3-b4d4-40e8-bf85-05601cd94aca", "user": "monitor"} github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1227 + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-23355 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.HH4XDlEeRd ++ mktemp + local LAST_ERR=/tmp/tmp.y2MlTAqRtk + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.HH4XDlEeRd perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.y2MlTAqRtk + rm /tmp/tmp.HH4XDlEeRd /tmp/tmp.y2MlTAqRtk + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.JVt1bnfEel ++ mktemp + local LAST_ERR=/tmp/tmp.UG8Uc8kDuL + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.JVt1bnfEel No resources found + cat /tmp/tmp.UG8Uc8kDuL + rm /tmp/tmp.JVt1bnfEel /tmp/tmp.UG8Uc8kDuL + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.bC9TU3mYDR ++ mktemp + local LAST_ERR=/tmp/tmp.UaTPXTbsZ7 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.bC9TU3mYDR No resources found + cat /tmp/tmp.UaTPXTbsZ7 + rm /tmp/tmp.bC9TU3mYDR /tmp/tmp.UaTPXTbsZ7 + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local 
LAST_OUT=/tmp/tmp.dFl6cblBWI ++ mktemp + local LAST_ERR=/tmp/tmp.aC4BlzRxF0 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dFl6cblBWI validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted + cat /tmp/tmp.aC4BlzRxF0 + rm /tmp/tmp.dFl6cblBWI /tmp/tmp.aC4BlzRxF0 + return 0 + kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml + : + '[' '!' -z '' ']' + '[' -n pxc-operator ']' + kubectl_bin delete --grace-period=0 --force=true namespace users-23355 + rm -rf /tmp/tmp.ocCIpgbhE8 + kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.osARgQTO34 + desc 'test passed' + set +o xtrace ----------------------------------------------------------------------------------- test passed+ local LAST_OUT=/tmp/tmp.eLczz5s6cc ----------------------------------------------------------------------------------- ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.cYaK93JPLZ + local exit_status=0 + local LAST_ERR=/tmp/tmp.Tdbl5PhDoI + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace pxc-operator + for i in '$(seq 0 2)' + set +e + kubectl delete --grace-period=0 --force=true namespace users-23355
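
The closing trace repeats the harness's cleanup pattern: strip finalizers from any remaining pxc objects, delete the pxc, pxc-backup and pxc-restore resources, remove the percona-xtradbcluster-webhook ValidatingWebhookConfiguration and the cert-manager manifests, and finally force-delete the test and operator namespaces, with every call going through a kubectl_bin wrapper that captures stdout/stderr to mktemp files and retries up to three times. A minimal re-creation of that wrapper and of the finalizer-stripping step, written only as a sketch of the pattern visible in the trace; the real helper in the e2e suite may differ in details:

kubectl_bin() {
    # capture output the way the trace does (LAST_OUT / LAST_ERR temp files)
    local LAST_OUT LAST_ERR exit_status=0
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        [ "$exit_status" != 0 ] || break   # success: stop retrying
        sleep 1
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR" >&2
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}

# finalizer removal as seen at the top of the cleanup: the namespace becomes $0
# and the cluster name $1 inside the per-line shell invocation
kubectl get pxc --all-namespaces -o wide | grep -v NAMESPACE \
  | xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
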