Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/logs/users-5-7.log WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 + create_infra users-30908 + local ns=users-30908 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-30781 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.apRUk0h87E ++ mktemp + local LAST_ERR=/tmp/tmp.U49ma52HyU + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.apRUk0h87E perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.U49ma52HyU + rm /tmp/tmp.apRUk0h87E /tmp/tmp.U49ma52HyU + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.wTLbWXFUHC ++ mktemp + local LAST_ERR=/tmp/tmp.Kunw32ZBUf + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.wTLbWXFUHC No resources found + cat /tmp/tmp.Kunw32ZBUf + rm /tmp/tmp.wTLbWXFUHC /tmp/tmp.Kunw32ZBUf + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.ej0MS4I2sc ++ mktemp + local LAST_ERR=/tmp/tmp.iODKPgTgnW + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ej0MS4I2sc No resources found + cat /tmp/tmp.iODKPgTgnW + rm /tmp/tmp.ej0MS4I2sc /tmp/tmp.iODKPgTgnW + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified 
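The repeated "error: resource(s) were provided, but no name was specified" messages above are expected: destroy_chaos_mesh looks up chaos-mesh webhooks, CRDs, clusterroles and clusterrolebindings with a grep/awk pipeline, gets an empty list on a cluster that never had chaos-mesh installed, and the resulting name-less kubectl delete fails and is ignored with ':'. A minimal sketch of that pattern, reconstructed from the trace (the helper name and loop structure are assumptions, only the individual commands are taken from the log):

destroy_chaos_mesh() {
    # each lookup is empty on a cluster without chaos-mesh, so the delete
    # below runs with no names, errors out, and the failure is swallowed
    for kind in MutatingWebhookConfiguration ValidatingWebhookConfiguration clusterrolebinding clusterrole; do
        names=$(kubectl get "$kind" | grep chaos-mesh | awk '{print $1}')
        timeout 30 kubectl delete "$kind" $names || :
    done
    crds=$(kubectl get crd | grep chaos-mesh.org | awk '{print $1}')
    timeout 30 kubectl delete crd $crds || :
}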
+ : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator + kubectl_bin get ns ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.Vk8FW7UR80 ++ mktemp + local LAST_OUT=/tmp/tmp.A9UlN13kUc ++ mktemp + local LAST_ERR=/tmp/tmp.nhRvydRgzF + local exit_status=0 + local LAST_ERR=/tmp/tmp.qRBRaL63hc ++ seq 0 2 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Vk8FW7UR80 + cat /tmp/tmp.nhRvydRgzF + rm /tmp/tmp.Vk8FW7UR80 /tmp/tmp.nhRvydRgzF + return 0 namespace "users-30781" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.A9UlN13kUc namespace "pxc-operator" deleted + cat /tmp/tmp.qRBRaL63hc + rm /tmp/tmp.A9UlN13kUc /tmp/tmp.qRBRaL63hc + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'namespace/pxc-operator - ' namespace/pxc-operator - + set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.AX8RLeqNCB ++ mktemp + local LAST_ERR=/tmp/tmp.CiZ6VPSJe9 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.AX8RLeqNCB namespace/pxc-operator created + cat /tmp/tmp.CiZ6VPSJe9 + rm /tmp/tmp.AX8RLeqNCB /tmp/tmp.CiZ6VPSJe9 + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.Zix92x86mY +++ mktemp ++ local LAST_ERR=/tmp/tmp.XcuNrhDitW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Zix92x86mY ++ cat /tmp/tmp.XcuNrhDitW ++ rm /tmp/tmp.Zix92x86mY /tmp/tmp.XcuNrhDitW ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1716-7bc7e237-1-cluster2 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.3OWbjWcg8Y ++ mktemp + local LAST_ERR=/tmp/tmp.vZsfrj2VrY + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1716-7bc7e237-1-cluster2 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.3OWbjWcg8Y Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1716-7bc7e237-1-cluster2" modified. 
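Almost every command in this log runs through the same retry wrapper: stdout and stderr go to two mktemp files, the command is attempted up to three times (seq 0 2) with set +e around it, and the captured output is printed and cleaned up before returning. The interleaved LAST_OUT/LAST_ERR lines above are two such wrappers running back to back (kubectl get ns and kubectl delete namespace pxc-operator). A rough reconstruction inferred from the trace; the real helper in the e2e-tests functions may differ in its details:

kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        [ "$exit_status" != 0 ] || break    # success, stop retrying
        sleep 0                             # the trace shows 'sleep 0' between attempts
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR"
    rm "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}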
+ cat /tmp/tmp.vZsfrj2VrY + rm /tmp/tmp.3OWbjWcg8Y /tmp/tmp.vZsfrj2VrY + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.iABKKdKliW ++ mktemp + local LAST_ERR=/tmp/tmp.ju5fKOToLF + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.iABKKdKliW customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.ju5fKOToLF + rm /tmp/tmp.iABKKdKliW /tmp/tmp.ju5fKOToLF + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.aFp42kZ4vw ++ mktemp + local LAST_ERR=/tmp/tmp.OJ97dPRs3r + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.aFp42kZ4vw clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.OJ97dPRs3r + rm /tmp/tmp.aFp42kZ4vw /tmp/tmp.OJ97dPRs3r + return 0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/deploy/cw-operator.yaml + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-1716-7bc7e237^' + kubectl_bin apply -f - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - ++ mktemp + local LAST_OUT=/tmp/tmp.VSGCHnKCvW ++ mktemp + local LAST_ERR=/tmp/tmp.x7OF8QfUOV + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.VSGCHnKCvW deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.x7OF8QfUOV + rm /tmp/tmp.VSGCHnKCvW /tmp/tmp.x7OF8QfUOV + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.XHQlRhD1r4 ++ mktemp + local LAST_ERR=/tmp/tmp.XUYc6T8dLR + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.XHQlRhD1r4 pod/percona-xtradb-cluster-operator-7dd8445b9-4q9qx condition met + cat /tmp/tmp.XUYc6T8dLR + rm /tmp/tmp.XHQlRhD1r4 /tmp/tmp.XUYc6T8dLR + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.Z8yNel8HaO +++ mktemp ++ local LAST_ERR=/tmp/tmp.XkZbq8H7vc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Z8yNel8HaO ++ cat /tmp/tmp.XkZbq8H7vc ++ rm /tmp/tmp.Z8yNel8HaO /tmp/tmp.XkZbq8H7vc ++ return 0 + wait_pod percona-xtradb-cluster-operator-7dd8445b9-4q9qx 480 pxc-operator + local pod=percona-xtradb-cluster-operator-7dd8445b9-4q9qx + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-7dd8445b9-4q9qx ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-7dd8445b9-4q9qx condition met percona-xtradb-cluster-operator-7dd8445b9-4q9qx.Ok + sleep 3 + create_namespace users-30908 + local namespace=users-30908 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + 
awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces users-30908' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-30908 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-30908 + kubectl_bin get ns ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.XAsKtDaEhA + local LAST_OUT=/tmp/tmp.4QjfgdeIyZ ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.FRkJBtvGfd + local exit_status=0 + local LAST_ERR=/tmp/tmp.EqgZ3aQFZp + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-30908 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-30908 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.XAsKtDaEhA + cat /tmp/tmp.FRkJBtvGfd + rm /tmp/tmp.XAsKtDaEhA /tmp/tmp.FRkJBtvGfd + return 0 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-30908 Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.4QjfgdeIyZ + cat /tmp/tmp.EqgZ3aQFZp Error from server (NotFound): namespaces "users-30908" not found + rm /tmp/tmp.4QjfgdeIyZ /tmp/tmp.EqgZ3aQFZp + return 1 + : + wait_for_delete namespace/users-30908 + local res=namespace/users-30908 + echo -n 'namespace/users-30908 - ' namespace/users-30908 - + set +o xtrace Error from server (NotFound): namespaces "users-30908" not found + desc 'create namespace users-30908' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-30908 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-30908 ++ mktemp + local LAST_OUT=/tmp/tmp.5gmAOPcc2Y ++ mktemp + local LAST_ERR=/tmp/tmp.0fb3IbMHr1 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-30908 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.5gmAOPcc2Y namespace/users-30908 created + cat /tmp/tmp.0fb3IbMHr1 + rm /tmp/tmp.5gmAOPcc2Y /tmp/tmp.0fb3IbMHr1 + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.dNWLZiyRkW +++ mktemp ++ local LAST_ERR=/tmp/tmp.TE3CnQYtvR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dNWLZiyRkW ++ cat /tmp/tmp.TE3CnQYtvR ++ rm /tmp/tmp.dNWLZiyRkW /tmp/tmp.TE3CnQYtvR ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1716-7bc7e237-1-cluster2 --namespace=users-30908 ++ mktemp + local LAST_OUT=/tmp/tmp.1SCfA0sbjs ++ mktemp + local LAST_ERR=/tmp/tmp.0X1MHqzkF7 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1716-7bc7e237-1-cluster2 --namespace=users-30908 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1SCfA0sbjs Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1716-7bc7e237-1-cluster2" modified. 
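The namespace preparation seen twice above (once for pxc-operator, once for users-30908) condenses to: delete leftover test namespaces while keeping system ones, delete the target namespace while tolerating NotFound (which is why kubectl delete namespace users-30908 retries and the wrapper returns 1 here), wait for it to disappear, recreate it, and switch the kubeconfig context to it. A condensed sketch assuming the helper semantics visible in the trace:

create_namespace() {
    local namespace=$1
    # drop stale test namespaces, keep system ones and the operator namespace
    kubectl get ns \
        | egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' \
        | awk '{print $1}' \
        | xargs kubectl delete ns || :
    kubectl delete namespace "$namespace" || :   # NotFound on a fresh run is fine
    wait_for_delete "namespace/$namespace"
    kubectl create namespace "$namespace"
    kubectl config set-context "$(kubectl config current-context)" --namespace="$namespace"
}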
+ cat /tmp/tmp.0X1MHqzkF7 + rm /tmp/tmp.1SCfA0sbjs /tmp/tmp.0X1MHqzkF7 + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.ilxIKYXL8d ++ mktemp + local LAST_ERR=/tmp/tmp.PWs5gayE19 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ilxIKYXL8d secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.PWs5gayE19 + rm /tmp/tmp.ilxIKYXL8d /tmp/tmp.PWs5gayE19 + return 0 + desc 'create PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.1t6Onk2nr9 ++ mktemp + local LAST_ERR=/tmp/tmp.lEHF0zG3Zf + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1t6Onk2nr9 secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.lEHF0zG3Zf + rm /tmp/tmp.1t6Onk2nr9 /tmp/tmp.lEHF0zG3Zf + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/conf/client.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/conf/client.yml + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/conf/client.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + local LAST_OUT=/tmp/tmp.MkfGltuYVy + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + 
/usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1716-7bc7e237#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-30908~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + local LAST_ERR=/tmp/tmp.XM8JIsuvRG + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.MkfGltuYVy deployment.apps/pxc-client created + cat /tmp/tmp.XM8JIsuvRG + rm /tmp/tmp.MkfGltuYVy /tmp/tmp.XM8JIsuvRG + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p\-\c\l\o\u\d ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/conf/some-name.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/conf/some-name.yml + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1716-7bc7e237#' + local LAST_OUT=/tmp/tmp.l4OkXNtN6P + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-30908~ + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' ++ mktemp + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + local LAST_ERR=/tmp/tmp.SuCLIyLPGG + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.l4OkXNtN6P perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.SuCLIyLPGG + rm /tmp/tmp.l4OkXNtN6P /tmp/tmp.SuCLIyLPGG + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.lpStGmpl2b ++++ mktemp +++ local LAST_ERR=/tmp/tmp.SeK5xJrNj0 +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' 
+++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.lpStGmpl2b +++ cat /tmp/tmp.SeK5xJrNj0 +++ rm /tmp/tmp.lpStGmpl2b /tmp/tmp.SeK5xJrNj0 +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.2e0AyD5D2I ++++ mktemp +++ local LAST_ERR=/tmp/tmp.xh7U0m6Jby +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.2e0AyD5D2I +++ cat /tmp/tmp.xh7U0m6Jby +++ rm /tmp/tmp.2e0AyD5D2I /tmp/tmp.xh7U0m6Jby +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-30908 ++ mktemp + local LAST_OUT=/tmp/tmp.OD1pOw4nVv ++ mktemp + local LAST_ERR=/tmp/tmp.0U7BuO7bna + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-30908 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-30908 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-30908 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.OD1pOw4nVv + cat /tmp/tmp.0U7BuO7bna error: no matching resources found + rm /tmp/tmp.OD1pOw4nVv /tmp/tmp.0U7BuO7bna + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met some-name-proxysql-0.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met some-name-pxc-0.Ok + for i 
in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ echo some-name-pxc-1 + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met some-name-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met some-name-pxc-2.Ok + sleep 10 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1gSmurT9tK +++ mktemp ++ local LAST_ERR=/tmp/tmp.tOcl12adnK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1gSmurT9tK ++ cat /tmp/tmp.tOcl12adnK Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.1gSmurT9tK /tmp/tmp.tOcl12adnK ++ return 0 + [[ -n '' ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NKMj9oFHPA +++ mktemp ++ local LAST_ERR=/tmp/tmp.UkepxncZFo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NKMj9oFHPA ++ cat /tmp/tmp.UkepxncZFo ++ rm /tmp/tmp.NKMj9oFHPA /tmp/tmp.UkepxncZFo ++ return 0 + client_pod=pxc-client-64b479df95-t46s5 + wait_pod pxc-client-64b479df95-t46s5 + local pod=pxc-client-64b479df95-t46s5 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-t46s5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-t46s5 condition met pxc-client-64b479df95-t46s5.Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.3Fbrr8ZcdX +++ mktemp ++ local LAST_ERR=/tmp/tmp.iuuvyLVrU3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' 
++ break ++ cat /tmp/tmp.3Fbrr8ZcdX ++ cat /tmp/tmp.iuuvyLVrU3 ++ rm /tmp/tmp.3Fbrr8ZcdX /tmp/tmp.iuuvyLVrU3 ++ return 0 + client_pod=pxc-client-64b479df95-t46s5 + wait_pod pxc-client-64b479df95-t46s5 + local pod=pxc-client-64b479df95-t46s5 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-t46s5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-t46s5 condition met pxc-client-64b479df95-t46s5.Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.R3YohGkF1Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.EH6u8FAinK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.R3YohGkF1Y ++ cat /tmp/tmp.EH6u8FAinK ++ rm /tmp/tmp.R3YohGkF1Y /tmp/tmp.EH6u8FAinK ++ return 0 + client_pod=pxc-client-64b479df95-t46s5 + wait_pod pxc-client-64b479df95-t46s5 + local pod=pxc-client-64b479df95-t46s5 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-t46s5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-t46s5 condition met pxc-client-64b479df95-t46s5.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.7NBKv3QUJN/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/compare/select-1.sql /tmp/tmp.7NBKv3QUJN/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2YsUqwrir8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.59BEae16Jp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2YsUqwrir8 ++ cat /tmp/tmp.59BEae16Jp ++ rm /tmp/tmp.2YsUqwrir8 /tmp/tmp.59BEae16Jp ++ return 0 + client_pod=pxc-client-64b479df95-t46s5 + wait_pod pxc-client-64b479df95-t46s5 + local pod=pxc-client-64b479df95-t46s5 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-t46s5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-t46s5 condition met pxc-client-64b479df95-t46s5.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.7NBKv3QUJN/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/compare/select-1.sql /tmp/tmp.7NBKv3QUJN/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JFxePtL5tB +++ mktemp ++ local LAST_ERR=/tmp/tmp.NE9MIEe2ri ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JFxePtL5tB ++ cat /tmp/tmp.NE9MIEe2ri ++ rm /tmp/tmp.JFxePtL5tB /tmp/tmp.NE9MIEe2ri ++ return 0 + client_pod=pxc-client-64b479df95-t46s5 + wait_pod pxc-client-64b479df95-t46s5 + local pod=pxc-client-64b479df95-t46s5 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-t46s5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-t46s5 condition met pxc-client-64b479df95-t46s5.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.7NBKv3QUJN/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/compare/select-1.sql /tmp/tmp.7NBKv3QUJN/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8AdnAe2lGO +++ mktemp ++ local LAST_ERR=/tmp/tmp.lULRDVsLlX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8AdnAe2lGO ++ cat /tmp/tmp.lULRDVsLlX Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.8AdnAe2lGO /tmp/tmp.lULRDVsLlX ++ return 0 + '[' '' ']' + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.UZFScccp8b ++ mktemp + local LAST_ERR=/tmp/tmp.X78KlDZCEz + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.UZFScccp8b secret/my-cluster-secrets patched + cat /tmp/tmp.X78KlDZCEz + rm /tmp/tmp.UZFScccp8b /tmp/tmp.X78KlDZCEz + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QJBVyMGHPO +++ mktemp ++ local LAST_ERR=/tmp/tmp.TeA4f83p3v ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QJBVyMGHPO ++ cat /tmp/tmp.TeA4f83p3v ++ rm /tmp/tmp.QJBVyMGHPO /tmp/tmp.TeA4f83p3v ++ return 0 + client_pod=pxc-client-64b479df95-t46s5 + wait_pod pxc-client-64b479df95-t46s5 + local pod=pxc-client-64b479df95-t46s5 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-t46s5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-t46s5 condition met pxc-client-64b479df95-t46s5.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.7NBKv3QUJN/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/compare/select-4.sql /tmp/tmp.7NBKv3QUJN/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.UruQq8dDwy ++ mktemp + local LAST_ERR=/tmp/tmp.vV2bIir6dA + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.UruQq8dDwy perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.vV2bIir6dA + rm /tmp/tmp.UruQq8dDwy /tmp/tmp.vV2bIir6dA + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.shoTGsLiSz +++ mktemp ++ local LAST_ERR=/tmp/tmp.t2C6jms8UU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.shoTGsLiSz ++ cat /tmp/tmp.t2C6jms8UU ++ rm /tmp/tmp.shoTGsLiSz /tmp/tmp.t2C6jms8UU ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Lljr10rbHB +++ mktemp ++ local LAST_ERR=/tmp/tmp.JCXUyZMS8D ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Lljr10rbHB ++ cat /tmp/tmp.JCXUyZMS8D ++ rm /tmp/tmp.Lljr10rbHB /tmp/tmp.JCXUyZMS8D ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.EGkj6k5ysb ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.GjPfK7rQk9 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.EGkj6k5ysb +++++ cat /tmp/tmp.GjPfK7rQk9 +++++ rm /tmp/tmp.EGkj6k5ysb /tmp/tmp.GjPfK7rQk9 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.BtUnK3YFDt ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.xdYb76zfHZ +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.BtUnK3YFDt +++++ cat /tmp/tmp.xdYb76zfHZ +++++ rm /tmp/tmp.BtUnK3YFDt /tmp/tmp.xdYb76zfHZ +++++ return 0 ++++ [[ true == 
\t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.slMNwCP10M +++ mktemp ++ local LAST_ERR=/tmp/tmp.3H0fKmOaX7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.slMNwCP10M ++ cat /tmp/tmp.3H0fKmOaX7 ++ rm /tmp/tmp.slMNwCP10M /tmp/tmp.3H0fKmOaX7 ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.YS5YybRMd2 ++ mktemp + local LAST_ERR=/tmp/tmp.rJLxRodDvY + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.YS5YybRMd2 secret/my-cluster-secrets patched + cat /tmp/tmp.rJLxRodDvY + rm /tmp/tmp.YS5YybRMd2 /tmp/tmp.rJLxRodDvY + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uJxxJtA8Ek +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZrRJeKLhCd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uJxxJtA8Ek ++ cat /tmp/tmp.ZrRJeKLhCd ++ rm /tmp/tmp.uJxxJtA8Ek /tmp/tmp.ZrRJeKLhCd ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5OAVjm7ENC +++ mktemp ++ local LAST_ERR=/tmp/tmp.pPLhxYUoeS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5OAVjm7ENC ++ cat /tmp/tmp.pPLhxYUoeS ++ rm /tmp/tmp.5OAVjm7ENC /tmp/tmp.pPLhxYUoeS ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6zrjScN7Gu +++ mktemp ++ local LAST_ERR=/tmp/tmp.ao728zFeWk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6zrjScN7Gu ++ cat /tmp/tmp.ao728zFeWk ++ rm /tmp/tmp.6zrjScN7Gu /tmp/tmp.ao728zFeWk ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NZp01aTEyh +++ mktemp ++ 
local LAST_ERR=/tmp/tmp.c0TJ043M7F ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NZp01aTEyh ++ cat /tmp/tmp.c0TJ043M7F ++ rm /tmp/tmp.NZp01aTEyh /tmp/tmp.c0TJ043M7F ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.JPNXUuJ9pp ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.kgWdMloYKR +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.JPNXUuJ9pp +++++ cat /tmp/tmp.kgWdMloYKR +++++ rm /tmp/tmp.JPNXUuJ9pp /tmp/tmp.kgWdMloYKR +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.cxn4LmKiZ0 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.C61fFd9748 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.cxn4LmKiZ0 +++++ cat /tmp/tmp.C61fFd9748 +++++ rm /tmp/tmp.cxn4LmKiZ0 /tmp/tmp.C61fFd9748 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.giXD46so0C +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ry3d11bVlw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.giXD46so0C ++ cat /tmp/tmp.Ry3d11bVlw ++ rm /tmp/tmp.giXD46so0C /tmp/tmp.Ry3d11bVlw ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.7NBKv3QUJN/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/compare/select-2.sql /tmp/tmp.7NBKv3QUJN/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.7NBKv3QUJN/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/compare/select-2.sql /tmp/tmp.7NBKv3QUJN/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.7NBKv3QUJN/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/compare/select-2.sql /tmp/tmp.7NBKv3QUJN/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.0WpjpLPxCE ++ mktemp + local LAST_ERR=/tmp/tmp.rSQgkxF3Tg + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.0WpjpLPxCE perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.rSQgkxF3Tg + rm /tmp/tmp.0WpjpLPxCE /tmp/tmp.rSQgkxF3Tg + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.zhDgovXHQv ++ mktemp + local LAST_ERR=/tmp/tmp.7q327Ua5m1 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.zhDgovXHQv secret/my-cluster-secrets patched + cat /tmp/tmp.7q327Ua5m1 + rm /tmp/tmp.zhDgovXHQv /tmp/tmp.7q327Ua5m1 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5GNJ0rMXyC +++ mktemp ++ local LAST_ERR=/tmp/tmp.JHR4Mhj8p6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5GNJ0rMXyC ++ cat /tmp/tmp.JHR4Mhj8p6 ++ rm /tmp/tmp.5GNJ0rMXyC /tmp/tmp.JHR4Mhj8p6 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yXrPncMzJq +++ mktemp ++ local LAST_ERR=/tmp/tmp.LIceZ0zIls ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yXrPncMzJq ++ cat /tmp/tmp.LIceZ0zIls ++ rm /tmp/tmp.yXrPncMzJq /tmp/tmp.LIceZ0zIls ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PCr02pXElw +++ mktemp ++ local LAST_ERR=/tmp/tmp.lhEsOL8q5l ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 
'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PCr02pXElw ++ cat /tmp/tmp.lhEsOL8q5l ++ rm /tmp/tmp.PCr02pXElw /tmp/tmp.lhEsOL8q5l ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iR9O34lXvP +++ mktemp ++ local LAST_ERR=/tmp/tmp.NBP5EQSAOW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iR9O34lXvP ++ cat /tmp/tmp.NBP5EQSAOW ++ rm /tmp/tmp.iR9O34lXvP /tmp/tmp.NBP5EQSAOW ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nTQjP9HbFE +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZbWDdDngpA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nTQjP9HbFE ++ cat /tmp/tmp.ZbWDdDngpA ++ rm /tmp/tmp.nTQjP9HbFE /tmp/tmp.ZbWDdDngpA ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ikp2HiQTEC +++ mktemp ++ local LAST_ERR=/tmp/tmp.KI6aRjxpMs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ikp2HiQTEC ++ cat /tmp/tmp.KI6aRjxpMs ++ rm /tmp/tmp.ikp2HiQTEC /tmp/tmp.KI6aRjxpMs ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VwBq34NLwF +++ mktemp ++ local LAST_ERR=/tmp/tmp.u3CHco5GYY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VwBq34NLwF ++ cat /tmp/tmp.u3CHco5GYY ++ rm /tmp/tmp.VwBq34NLwF /tmp/tmp.u3CHco5GYY ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5aTWwv5qLM +++ mktemp ++ local LAST_ERR=/tmp/tmp.aJan2qEea6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5aTWwv5qLM ++ cat /tmp/tmp.aJan2qEea6 ++ rm /tmp/tmp.5aTWwv5qLM /tmp/tmp.aJan2qEea6 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.r2f5slib5h +++ mktemp ++ local LAST_ERR=/tmp/tmp.MAF3sZN0fC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e 
++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.r2f5slib5h ++ cat /tmp/tmp.MAF3sZN0fC ++ rm /tmp/tmp.r2f5slib5h /tmp/tmp.MAF3sZN0fC ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.fZhzBC9C3v ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.8LTVy8ajX0 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.fZhzBC9C3v +++++ cat /tmp/tmp.8LTVy8ajX0 +++++ rm /tmp/tmp.fZhzBC9C3v /tmp/tmp.8LTVy8ajX0 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.xXtC9gfqVd ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.R0P5sjXx9d +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.xXtC9gfqVd +++++ cat /tmp/tmp.R0P5sjXx9d +++++ rm /tmp/tmp.xXtC9gfqVd /tmp/tmp.R0P5sjXx9d +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xBNxSyWX7r +++ mktemp ++ local LAST_ERR=/tmp/tmp.ppdYTKawt8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xBNxSyWX7r ++ cat /tmp/tmp.ppdYTKawt8 ++ rm /tmp/tmp.xBNxSyWX7r /tmp/tmp.ppdYTKawt8 ++ return 0 + [[ 2 == \2 ]] + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' 
-s /tmp/tmp.7NBKv3QUJN/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/compare/select-3.sql /tmp/tmp.7NBKv3QUJN/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.44Jd7ZIIxk ++ mktemp + local LAST_ERR=/tmp/tmp.H5LS0TCOFU + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.44Jd7ZIIxk secret/my-cluster-secrets patched + cat /tmp/tmp.H5LS0TCOFU + rm /tmp/tmp.44Jd7ZIIxk /tmp/tmp.H5LS0TCOFU + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.EHQLwR3fqL +++ mktemp ++ local LAST_ERR=/tmp/tmp.StFsq5CSLK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EHQLwR3fqL ++ cat /tmp/tmp.StFsq5CSLK ++ rm /tmp/tmp.EHQLwR3fqL /tmp/tmp.StFsq5CSLK ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + echo 'Skipping dual password feature doesn'\''t work for 5.7. PXC 5.7 doesn'\''t support it!' Skipping dual password feature doesn't work for 5.7. PXC 5.7 doesn't support it! 
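For reference, the monitor-password step above reduces to patching the base64-encoded key in the cluster secret and letting the operator reconcile before reconnecting. A minimal sketch of that flow, assuming the cluster and secret names from this run; the sleep interval and the pxc-client deployment name are illustrative assumptions, not part of the recorded test:

# assumption: run against the same namespace as this test
new_pass='test-password'
encoded=$(echo -n "${new_pass}" | base64)

# patch the per-user key inside the cluster secret (same technique the test uses)
kubectl patch secret my-cluster-secrets -p "{\"data\":{\"monitor\": \"${encoded}\"}}"

# read the value back and decode it to confirm what is stored
kubectl get secret my-cluster-secrets -o jsonpath='{.data.monitor}' | base64 --decode

# give the operator time to propagate the change, then connect with the new password
# (deployment name pxc-client is assumed from the pod name in the log)
sleep 15
kubectl exec deploy/pxc-client -- mysql -h some-name-proxysql -umonitor -p"${new_pass}" -e 'SHOW TABLES;'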
+ return + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jAo1gRqbdH +++ mktemp ++ local LAST_ERR=/tmp/tmp.EhFfUeAYv2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jAo1gRqbdH ++ cat /tmp/tmp.EhFfUeAYv2 ++ rm /tmp/tmp.jAo1gRqbdH /tmp/tmp.EhFfUeAYv2 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Q5tyNyPYPi +++ mktemp ++ local LAST_ERR=/tmp/tmp.mq0mfnNqaX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Q5tyNyPYPi ++ cat /tmp/tmp.mq0mfnNqaX ++ rm /tmp/tmp.Q5tyNyPYPi /tmp/tmp.mq0mfnNqaX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FsppOV0rBH +++ mktemp ++ local LAST_ERR=/tmp/tmp.KyASlHrJSJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FsppOV0rBH ++ cat /tmp/tmp.KyASlHrJSJ ++ rm /tmp/tmp.FsppOV0rBH /tmp/tmp.KyASlHrJSJ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cwq5QiFfsR +++ mktemp ++ local LAST_ERR=/tmp/tmp.P6PWqnJNk7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cwq5QiFfsR ++ cat /tmp/tmp.P6PWqnJNk7 ++ rm /tmp/tmp.cwq5QiFfsR /tmp/tmp.P6PWqnJNk7 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HOlovfOhYC +++ mktemp ++ local LAST_ERR=/tmp/tmp.BSmFZl09Ef ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HOlovfOhYC ++ cat /tmp/tmp.BSmFZl09Ef ++ rm /tmp/tmp.HOlovfOhYC /tmp/tmp.BSmFZl09Ef ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.YvINkr92KR ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.w8pQlDYjN8 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i 
in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.YvINkr92KR +++++ cat /tmp/tmp.w8pQlDYjN8 +++++ rm /tmp/tmp.YvINkr92KR /tmp/tmp.w8pQlDYjN8 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.zFog0m4nkj ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.5SumsApR0t +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.zFog0m4nkj +++++ cat /tmp/tmp.5SumsApR0t +++++ rm /tmp/tmp.zFog0m4nkj /tmp/tmp.5SumsApR0t +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XmYjj9owaA +++ mktemp ++ local LAST_ERR=/tmp/tmp.3dwEDeKZIk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XmYjj9owaA ++ cat /tmp/tmp.3dwEDeKZIk ++ rm /tmp/tmp.XmYjj9owaA /tmp/tmp.3dwEDeKZIk ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.esGtskrUR5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.4xLYnva3KC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.esGtskrUR5 ++ cat /tmp/tmp.4xLYnva3KC ++ rm /tmp/tmp.esGtskrUR5 /tmp/tmp.4xLYnva3KC ++ return 0 + client_pod=pxc-client-64b479df95-t46s5 + wait_pod pxc-client-64b479df95-t46s5 + local pod=pxc-client-64b479df95-t46s5 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-t46s5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-t46s5 condition met pxc-client-64b479df95-t46s5.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.7NBKv3QUJN/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/compare/select-4.sql /tmp/tmp.7NBKv3QUJN/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.K9c6wiw0xu ++ mktemp + local LAST_ERR=/tmp/tmp.JU7NTkdO9e + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.K9c6wiw0xu secret/my-cluster-secrets patched + cat /tmp/tmp.JU7NTkdO9e + rm /tmp/tmp.K9c6wiw0xu /tmp/tmp.JU7NTkdO9e + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CPKik37jbl +++ mktemp ++ local LAST_ERR=/tmp/tmp.QNdKRL6yid ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CPKik37jbl ++ cat /tmp/tmp.QNdKRL6yid ++ rm /tmp/tmp.CPKik37jbl /tmp/tmp.QNdKRL6yid ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.J2RRVn2qXe +++ mktemp ++ local LAST_ERR=/tmp/tmp.Q51hc2GeKn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.J2RRVn2qXe ++ cat /tmp/tmp.Q51hc2GeKn ++ rm /tmp/tmp.J2RRVn2qXe /tmp/tmp.Q51hc2GeKn ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qvd9rw75Ff +++ mktemp ++ local LAST_ERR=/tmp/tmp.n9do1IwRWH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qvd9rw75Ff ++ cat /tmp/tmp.n9do1IwRWH ++ rm /tmp/tmp.qvd9rw75Ff /tmp/tmp.n9do1IwRWH ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.fyhpHNysJo ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.jR6UPqwaYg +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ 
exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.fyhpHNysJo +++++ cat /tmp/tmp.jR6UPqwaYg +++++ rm /tmp/tmp.fyhpHNysJo /tmp/tmp.jR6UPqwaYg +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.igbckEymgt ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.MnwqbjvtA0 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.igbckEymgt +++++ cat /tmp/tmp.MnwqbjvtA0 +++++ rm /tmp/tmp.igbckEymgt /tmp/tmp.MnwqbjvtA0 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SaM4t9c7cd +++ mktemp ++ local LAST_ERR=/tmp/tmp.8lsMo2IUmD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SaM4t9c7cd ++ cat /tmp/tmp.8lsMo2IUmD ++ rm /tmp/tmp.SaM4t9c7cd /tmp/tmp.8lsMo2IUmD ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.e9Wf8Sg42O +++ mktemp ++ local LAST_ERR=/tmp/tmp.9BQaFNgx7I ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.e9Wf8Sg42O ++ cat /tmp/tmp.9BQaFNgx7I ++ rm /tmp/tmp.e9Wf8Sg42O /tmp/tmp.9BQaFNgx7I ++ return 0 + client_pod=pxc-client-64b479df95-t46s5 + wait_pod pxc-client-64b479df95-t46s5 + local pod=pxc-client-64b479df95-t46s5 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-t46s5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-t46s5 condition met pxc-client-64b479df95-t46s5.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.7NBKv3QUJN/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/compare/select-4.sql /tmp/tmp.7NBKv3QUJN/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.FkTQFSwvu4 ++ mktemp + local LAST_ERR=/tmp/tmp.ySTbAJ6mqP + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.FkTQFSwvu4 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.ySTbAJ6mqP + rm /tmp/tmp.FkTQFSwvu4 /tmp/tmp.ySTbAJ6mqP + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.X0NIWboCZb +++ mktemp ++ local LAST_ERR=/tmp/tmp.sX0t8CZsyI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.X0NIWboCZb ++ cat /tmp/tmp.sX0t8CZsyI ++ rm /tmp/tmp.X0NIWboCZb /tmp/tmp.sX0t8CZsyI ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2Z5yeWDhPd +++ mktemp ++ local LAST_ERR=/tmp/tmp.ummrJFx6xj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2Z5yeWDhPd ++ cat /tmp/tmp.ummrJFx6xj ++ rm /tmp/tmp.2Z5yeWDhPd /tmp/tmp.ummrJFx6xj ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.B7tJJeViUw +++ mktemp ++ local LAST_ERR=/tmp/tmp.k77yhq2sn9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.B7tJJeViUw ++ cat /tmp/tmp.k77yhq2sn9 ++ rm /tmp/tmp.B7tJJeViUw /tmp/tmp.k77yhq2sn9 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oVpkJoYHpi +++ mktemp ++ local LAST_ERR=/tmp/tmp.UUHsOV54RI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break 
++ cat /tmp/tmp.oVpkJoYHpi ++ cat /tmp/tmp.UUHsOV54RI ++ rm /tmp/tmp.oVpkJoYHpi /tmp/tmp.UUHsOV54RI ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zy733VMVhC +++ mktemp ++ local LAST_ERR=/tmp/tmp.6SHsscxPMh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zy733VMVhC ++ cat /tmp/tmp.6SHsscxPMh ++ rm /tmp/tmp.zy733VMVhC /tmp/tmp.6SHsscxPMh ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Nup3hMFILf +++ mktemp ++ local LAST_ERR=/tmp/tmp.ylmGfRyYQZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Nup3hMFILf ++ cat /tmp/tmp.ylmGfRyYQZ ++ rm /tmp/tmp.Nup3hMFILf /tmp/tmp.ylmGfRyYQZ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.81E1C3y22P +++ mktemp ++ local LAST_ERR=/tmp/tmp.jwgQwTzmBb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.81E1C3y22P ++ cat /tmp/tmp.jwgQwTzmBb ++ rm /tmp/tmp.81E1C3y22P /tmp/tmp.jwgQwTzmBb ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lDUWMqR191 +++ mktemp ++ local LAST_ERR=/tmp/tmp.KGhoTngq69 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lDUWMqR191 ++ cat /tmp/tmp.KGhoTngq69 ++ rm /tmp/tmp.lDUWMqR191 /tmp/tmp.KGhoTngq69 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.QYoef9aJj2 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.fMkNZCzkAN +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.QYoef9aJj2 +++++ cat /tmp/tmp.fMkNZCzkAN +++++ rm /tmp/tmp.QYoef9aJj2 /tmp/tmp.fMkNZCzkAN +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.tpUeUocahC ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.uZsAUS18mr +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.tpUeUocahC +++++ cat 
/tmp/tmp.uZsAUS18mr +++++ rm /tmp/tmp.tpUeUocahC /tmp/tmp.uZsAUS18mr +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KjLOcHEIbe +++ mktemp ++ local LAST_ERR=/tmp/tmp.VGZoWDSfr8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KjLOcHEIbe ++ cat /tmp/tmp.VGZoWDSfr8 ++ rm /tmp/tmp.KjLOcHEIbe /tmp/tmp.VGZoWDSfr8 ++ return 0 + [[ 2 == \2 ]] + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.ACTmQvmPyj ++ mktemp + local LAST_ERR=/tmp/tmp.uhHnL2JZUQ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ACTmQvmPyj secret/my-cluster-secrets-2 patched + cat /tmp/tmp.uhHnL2JZUQ + rm /tmp/tmp.ACTmQvmPyj /tmp/tmp.uhHnL2JZUQ + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.F3ZynyK25m +++ mktemp ++ local LAST_ERR=/tmp/tmp.xq8qZZTDGp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.F3ZynyK25m ++ cat /tmp/tmp.xq8qZZTDGp ++ rm /tmp/tmp.F3ZynyK25m /tmp/tmp.xq8qZZTDGp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JD3F5PARhI +++ mktemp ++ local LAST_ERR=/tmp/tmp.82IcwW4Oon ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JD3F5PARhI ++ cat /tmp/tmp.82IcwW4Oon ++ rm /tmp/tmp.JD3F5PARhI /tmp/tmp.82IcwW4Oon ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.XqR2J6NSYG +++ mktemp ++ local LAST_ERR=/tmp/tmp.5NpclSoJ29 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ 
exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.XqR2J6NSYG ++ cat /tmp/tmp.5NpclSoJ29 ++ rm /tmp/tmp.XqR2J6NSYG /tmp/tmp.5NpclSoJ29 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.7nvw44L5yP ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.zYvP3sTY5h +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.7nvw44L5yP +++++ cat /tmp/tmp.zYvP3sTY5h +++++ rm /tmp/tmp.7nvw44L5yP /tmp/tmp.zYvP3sTY5h +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.LqaJzJJA3O ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.jnTRXwyxU4 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.LqaJzJJA3O +++++ cat /tmp/tmp.jnTRXwyxU4 +++++ rm /tmp/tmp.LqaJzJJA3O /tmp/tmp.jnTRXwyxU4 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.I0TKizVuEZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.b4IOTubml0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.I0TKizVuEZ ++ cat /tmp/tmp.b4IOTubml0 ++ rm /tmp/tmp.I0TKizVuEZ /tmp/tmp.b4IOTubml0 ++ return 0 + [[ 2 == \2 ]] + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PULvqnpB3Z +++ mktemp ++ local LAST_ERR=/tmp/tmp.u6NpCLPAnK ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PULvqnpB3Z ++ cat /tmp/tmp.u6NpCLPAnK ++ rm /tmp/tmp.PULvqnpB3Z /tmp/tmp.u6NpCLPAnK ++ return 0 + client_pod=pxc-client-64b479df95-t46s5 + wait_pod pxc-client-64b479df95-t46s5 + local pod=pxc-client-64b479df95-t46s5 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-t46s5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-t46s5 condition met 
pxc-client-64b479df95-t46s5.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.7NBKv3QUJN/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/compare/select-4.sql /tmp/tmp.7NBKv3QUJN/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.zSBewKokBz +++ mktemp ++ local LAST_ERR=/tmp/tmp.6tLVKY30XW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zSBewKokBz ++ cat /tmp/tmp.6tLVKY30XW ++ rm /tmp/tmp.zSBewKokBz /tmp/tmp.6tLVKY30XW ++ return 0 + newpass=5ycA~Kn-p29zt61InFd + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''5ycA~Kn-p29zt61InFd'\'';' '-h some-name-pxc -uroot -p'\''5ycA~Kn-p29zt61InFd'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''5ycA~Kn-p29zt61InFd'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''5ycA~Kn-p29zt61InFd'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pMrwhbjOKe +++ mktemp ++ local LAST_ERR=/tmp/tmp.Jt738BsAFA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pMrwhbjOKe ++ cat /tmp/tmp.Jt738BsAFA ++ rm /tmp/tmp.pMrwhbjOKe /tmp/tmp.Jt738BsAFA ++ return 0 + client_pod=pxc-client-64b479df95-t46s5 + wait_pod pxc-client-64b479df95-t46s5 + local pod=pxc-client-64b479df95-t46s5 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-t46s5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-t46s5 condition met pxc-client-64b479df95-t46s5.Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''5ycA~Kn-p29zt61InFd'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''5ycA~Kn-p29zt61InFd'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''5ycA~Kn-p29zt61InFd'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''5ycA~Kn-p29zt61InFd'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.auxLJOk8fi +++ mktemp ++ local LAST_ERR=/tmp/tmp.DAjkhs8Oi7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.auxLJOk8fi ++ cat /tmp/tmp.DAjkhs8Oi7 ++ rm /tmp/tmp.auxLJOk8fi /tmp/tmp.DAjkhs8Oi7 ++ return 0 + 
client_pod=pxc-client-64b479df95-t46s5 + wait_pod pxc-client-64b479df95-t46s5 + local pod=pxc-client-64b479df95-t46s5 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-t46s5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-t46s5 condition met pxc-client-64b479df95-t46s5.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.7NBKv3QUJN/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/compare/select-4.sql /tmp/tmp.7NBKv3QUJN/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.6H67z5CIBR +++ mktemp ++ local LAST_ERR=/tmp/tmp.zf6Xdu7ZQ1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6H67z5CIBR ++ cat /tmp/tmp.zf6Xdu7ZQ1 ++ rm /tmp/tmp.6H67z5CIBR /tmp/tmp.zf6Xdu7ZQ1 ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.ualMZLQUUL ++ mktemp + local LAST_ERR=/tmp/tmp.pVhJR28Ald + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ualMZLQUUL secret/my-cluster-secrets-2 configured + cat /tmp/tmp.pVhJR28Ald Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
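The kubectl warning above appears because the secret was originally created without the kubectl.kubernetes.io/last-applied-configuration annotation, so the first declarative apply has to patch it in. A small sketch of how that situation is normally avoided; the file path is the one used by this test, and the commands are illustrative rather than part of the recorded run:

# Creating with --save-config records the applied configuration up front,
# so later declarative updates do not trigger the missing-annotation warning.
kubectl create -f e2e-tests/users/conf/secrets.yml --save-config

# Subsequent edits to the manifest can then be applied cleanly.
kubectl apply -f e2e-tests/users/conf/secrets.yml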
+ rm /tmp/tmp.ualMZLQUUL /tmp/tmp.pVhJR28Ald + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WkNqHkaVBc +++ mktemp ++ local LAST_ERR=/tmp/tmp.s47rhp2PLg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WkNqHkaVBc ++ cat /tmp/tmp.s47rhp2PLg ++ rm /tmp/tmp.WkNqHkaVBc /tmp/tmp.s47rhp2PLg ++ return 0 + client_pod=pxc-client-64b479df95-t46s5 + wait_pod pxc-client-64b479df95-t46s5 + local pod=pxc-client-64b479df95-t46s5 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-t46s5 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-t46s5 condition met pxc-client-64b479df95-t46s5.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.7NBKv3QUJN/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/compare/select-4.sql /tmp/tmp.7NBKv3QUJN/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + local LAST_OUT=/tmp/tmp.EeHYwmJgxS + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-30908~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1716-7bc7e237#' ++ mktemp + local LAST_ERR=/tmp/tmp.tUz43SuR3r + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.EeHYwmJgxS 
perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.tUz43SuR3r + rm /tmp/tmp.EeHYwmJgxS /tmp/tmp.tUz43SuR3r + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NrwCOUcp62 +++ mktemp ++ local LAST_ERR=/tmp/tmp.usXUpux7ev ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NrwCOUcp62 ++ cat /tmp/tmp.usXUpux7ev ++ rm /tmp/tmp.NrwCOUcp62 /tmp/tmp.usXUpux7ev ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5jIWRdcswz +++ mktemp ++ local LAST_ERR=/tmp/tmp.BrVUif74tr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5jIWRdcswz ++ cat /tmp/tmp.BrVUif74tr ++ rm /tmp/tmp.5jIWRdcswz /tmp/tmp.BrVUif74tr ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RDy1zGGFsQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.XpMtpWuy6e ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RDy1zGGFsQ ++ cat /tmp/tmp.XpMtpWuy6e ++ rm /tmp/tmp.RDy1zGGFsQ /tmp/tmp.XpMtpWuy6e ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1XYQrMo1LU +++ mktemp ++ local LAST_ERR=/tmp/tmp.kmd825hdTG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1XYQrMo1LU ++ cat /tmp/tmp.kmd825hdTG ++ rm /tmp/tmp.1XYQrMo1LU /tmp/tmp.kmd825hdTG ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tx5f8vS2EH +++ mktemp ++ local LAST_ERR=/tmp/tmp.T8pBiRSmXM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tx5f8vS2EH ++ cat /tmp/tmp.T8pBiRSmXM ++ rm /tmp/tmp.tx5f8vS2EH /tmp/tmp.T8pBiRSmXM ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 
]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sXlptrOdLf +++ mktemp ++ local LAST_ERR=/tmp/tmp.6lxQzSREfa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sXlptrOdLf ++ cat /tmp/tmp.6lxQzSREfa ++ rm /tmp/tmp.sXlptrOdLf /tmp/tmp.6lxQzSREfa ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Yio1fjr0fL +++ mktemp ++ local LAST_ERR=/tmp/tmp.7kXB0LIu14 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Yio1fjr0fL ++ cat /tmp/tmp.7kXB0LIu14 ++ rm /tmp/tmp.Yio1fjr0fL /tmp/tmp.7kXB0LIu14 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zmmx0zVzws +++ mktemp ++ local LAST_ERR=/tmp/tmp.4pl8R3Gs10 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zmmx0zVzws ++ cat /tmp/tmp.4pl8R3Gs10 ++ rm /tmp/tmp.zmmx0zVzws /tmp/tmp.4pl8R3Gs10 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 7 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jYWrRwAFoI +++ mktemp ++ local LAST_ERR=/tmp/tmp.4tEqZH9TwT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jYWrRwAFoI ++ cat /tmp/tmp.4tEqZH9TwT ++ rm /tmp/tmp.jYWrRwAFoI /tmp/tmp.4tEqZH9TwT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 8 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tgqz4qvdoO +++ mktemp ++ local LAST_ERR=/tmp/tmp.cZ8nPLUnn7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tgqz4qvdoO ++ cat /tmp/tmp.cZ8nPLUnn7 ++ rm /tmp/tmp.tgqz4qvdoO /tmp/tmp.cZ8nPLUnn7 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 9 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4T1nbJyzWA +++ mktemp ++ local LAST_ERR=/tmp/tmp.M0FBtSfct7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4T1nbJyzWA ++ cat /tmp/tmp.M0FBtSfct7 ++ rm /tmp/tmp.4T1nbJyzWA /tmp/tmp.M0FBtSfct7 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' 
waiting for cluster readyness + sleep 20 + [[ 10 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gO5FYhaU64 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Vkzg2VoomD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gO5FYhaU64 ++ cat /tmp/tmp.Vkzg2VoomD ++ rm /tmp/tmp.gO5FYhaU64 /tmp/tmp.Vkzg2VoomD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 11 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VK7HHbKjAH +++ mktemp ++ local LAST_ERR=/tmp/tmp.oYDTPfQNl8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VK7HHbKjAH ++ cat /tmp/tmp.oYDTPfQNl8 ++ rm /tmp/tmp.VK7HHbKjAH /tmp/tmp.oYDTPfQNl8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 12 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QAGjdGIUnz +++ mktemp ++ local LAST_ERR=/tmp/tmp.D9Gi3mdcvs ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QAGjdGIUnz ++ cat /tmp/tmp.D9Gi3mdcvs ++ rm /tmp/tmp.QAGjdGIUnz /tmp/tmp.D9Gi3mdcvs ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.H3ZjDDEX68 +++ mktemp ++ local LAST_ERR=/tmp/tmp.jXnB6UAON2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.H3ZjDDEX68 ++ cat /tmp/tmp.jXnB6UAON2 ++ rm /tmp/tmp.H3ZjDDEX68 /tmp/tmp.jXnB6UAON2 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.D7QGrRJhUZ ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.w1l1KjPL4T +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.D7QGrRJhUZ +++++ cat /tmp/tmp.w1l1KjPL4T +++++ rm /tmp/tmp.D7QGrRJhUZ /tmp/tmp.w1l1KjPL4T +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.93eUaK9O8d +++ mktemp ++ local LAST_ERR=/tmp/tmp.ylvo0fF7mQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.93eUaK9O8d ++ cat /tmp/tmp.ylvo0fF7mQ ++ rm /tmp/tmp.93eUaK9O8d /tmp/tmp.ylvo0fF7mQ ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets monitor 
dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.iT36eRYS72 ++ mktemp + local LAST_ERR=/tmp/tmp.dGfb2T4bm5 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.iT36eRYS72 secret/my-cluster-secrets patched + cat /tmp/tmp.dGfb2T4bm5 + rm /tmp/tmp.iT36eRYS72 /tmp/tmp.dGfb2T4bm5 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MD5nBLTdQ2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.qceDhQCDdA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MD5nBLTdQ2 ++ cat /tmp/tmp.qceDhQCDdA ++ rm /tmp/tmp.MD5nBLTdQ2 /tmp/tmp.qceDhQCDdA ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fxaCQskzTH +++ mktemp ++ local LAST_ERR=/tmp/tmp.oMMvL3oeQg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fxaCQskzTH ++ cat /tmp/tmp.oMMvL3oeQg ++ rm /tmp/tmp.fxaCQskzTH /tmp/tmp.oMMvL3oeQg ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6ylqg5eOfy +++ mktemp ++ local LAST_ERR=/tmp/tmp.K5fxnTtfDQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6ylqg5eOfy ++ cat /tmp/tmp.K5fxnTtfDQ ++ rm /tmp/tmp.6ylqg5eOfy /tmp/tmp.K5fxnTtfDQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.L9fZbaXLOg +++ mktemp ++ local LAST_ERR=/tmp/tmp.riOtA2mdhY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.L9fZbaXLOg ++ cat /tmp/tmp.riOtA2mdhY ++ rm /tmp/tmp.L9fZbaXLOg /tmp/tmp.riOtA2mdhY ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ 
local LAST_OUT=/tmp/tmp.9yB9mUmeYJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.RXw6KrfCAT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9yB9mUmeYJ ++ cat /tmp/tmp.RXw6KrfCAT ++ rm /tmp/tmp.9yB9mUmeYJ /tmp/tmp.RXw6KrfCAT ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hZAaWnNkhh +++ mktemp ++ local LAST_ERR=/tmp/tmp.zuYM58sMom ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hZAaWnNkhh ++ cat /tmp/tmp.zuYM58sMom ++ rm /tmp/tmp.hZAaWnNkhh /tmp/tmp.zuYM58sMom ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.mbpTCv3fXE ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.D0SJeB3z9N +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.mbpTCv3fXE +++++ cat /tmp/tmp.D0SJeB3z9N +++++ rm /tmp/tmp.mbpTCv3fXE /tmp/tmp.D0SJeB3z9N +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.o2b48VaUZr +++ mktemp ++ local LAST_ERR=/tmp/tmp.O6brKKJO7R ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.o2b48VaUZr ++ cat /tmp/tmp.O6brKKJO7R ++ rm /tmp/tmp.o2b48VaUZr /tmp/tmp.O6brKKJO7R ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PbtY2NZbVD +++ mktemp ++ local LAST_ERR=/tmp/tmp.drmeS3JH6m ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PbtY2NZbVD ++ cat /tmp/tmp.drmeS3JH6m ++ rm /tmp/tmp.PbtY2NZbVD /tmp/tmp.drmeS3JH6m ++ return 0 + client_pod=pxc-client-64b479df95-t46s5 + wait_pod pxc-client-64b479df95-t46s5 + local pod=pxc-client-64b479df95-t46s5 + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-t46s5 ++ /usr/bin/sed -E 
's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-t46s5 condition met pxc-client-64b479df95-t46s5.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.7NBKv3QUJN/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1716/e2e-tests/users/compare/select-3.sql /tmp/tmp.7NBKv3QUJN/select-3.sql + destroy users-30908 + local namespace=users-30908 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + grep -v 'the object has been modified' + grep -v 'get backup status: Job.batch' + sort -u + tee /tmp/tmp.7NBKv3QUJN/operator.log + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.mMXobFc4Y2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.slhvPmqwug ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.mMXobFc4Y2 ++ cat /tmp/tmp.slhvPmqwug ++ rm /tmp/tmp.mMXobFc4Y2 /tmp/tmp.slhvPmqwug ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-7dd8445b9-4q9qx ++ mktemp + local LAST_OUT=/tmp/tmp.iWCDtpxXTF ++ mktemp + local LAST_ERR=/tmp/tmp.dhFpYieBpR + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-7dd8445b9-4q9qx + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.iWCDtpxXTF + cat /tmp/tmp.dhFpYieBpR + rm /tmp/tmp.iWCDtpxXTF /tmp/tmp.dhFpYieBpR + return 0 2024-05-23T03:38:18.591Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.26.15-gke.1320000"} 2024-05-23T03:38:18.592Z INFO setup Manager starting up {"gitCommit": "7bc7e2372091ac733945cde052cb72ba69639704", "gitBranch": "PR-1716-7bc7e237", "buildTime": "2024-05-23T01:37:38Z", "goVersion": "go1.22.3", "os": "linux", "arch": "amd64"} 2024-05-23T03:38:18.592Z INFO setup Registering Components. 2024-05-23T03:38:22.334Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2024-05-23T03:38:22.337Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2024-05-23T03:38:22.337Z INFO controller-runtime.metrics Starting metrics server 2024-05-23T03:38:22.337Z INFO controller-runtime.webhook Starting webhook server 2024-05-23T03:38:22.337Z INFO setup Starting the Cmd. 
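Aside, not part of the recorded run: the pipeline above tees the filtered operator log to /tmp/tmp.7NBKv3QUJN/operator.log, so the ERROR-level entries reproduced below could be pulled out on their own afterwards. This is a minimal sketch; the grep pattern is an assumption, not a helper from the test suite.

# sketch: list only ERROR-level operator entries from the saved, filtered log
grep ' ERROR ' /tmp/tmp.7NBKv3QUJN/operator.log | sort -u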
2024-05-23T03:38:22.337Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2024-05-23T03:38:22.338Z INFO controller-runtime.certwatcher Starting certificate watcher 2024-05-23T03:38:22.338Z INFO controller-runtime.certwatcher Updated current TLS certificate 2024-05-23T03:38:22.338Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2024-05-23T03:38:22.438Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 2024-05-23T03:38:22.455Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2024-05-23T03:38:22.456Z DEBUG events percona-xtradb-cluster-operator-7dd8445b9-4q9qx_c42a3c5e-b296-4a3e-85c1-5bd7c2af192a became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"d9b84bd2-2c7b-4475-b99f-32e4513d22ce","apiVersion":"coordination.k8s.io/v1","resourceVersion":"63976"}, "reason": "LeaderElection"} 2024-05-23T03:38:22.456Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2024-05-23T03:38:22.456Z INFO Starting Controller {"controller": "pxc-controller"} 2024-05-23T03:38:22.456Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2024-05-23T03:38:22.456Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: unknown type"} 2024-05-23T03:38:22.456Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: unknown type"} 2024-05-23T03:38:22.456Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: unknown type"} 2024-05-23T03:38:22.559Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2024-05-23T03:38:22.559Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2024-05-23T03:38:22.567Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2024-05-23T03:38:47.154Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "0fb4fc3c-7813-4f46-874b-6c2a3f733e3e", "version": "1.15.0"} 2024-05-23T03:40:07.105Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "8c271d6f-8893-42a5-9df7-262453076a2b", "user": "operator"} 2024-05-23T03:40:07.154Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "8c271d6f-8893-42a5-9df7-262453076a2b", "user": "monitor"} 2024-05-23T03:40:07.200Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "8c271d6f-8893-42a5-9df7-262453076a2b"} 2024-05-23T03:40:07.251Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "8c271d6f-8893-42a5-9df7-262453076a2b", "user": "xtrabackup"} 2024-05-23T03:40:07.299Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "8c271d6f-8893-42a5-9df7-262453076a2b"} 2024-05-23T03:40:07.435Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "8c271d6f-8893-42a5-9df7-262453076a2b", "err": "get primary pxc pod: not found"} 2024-05-23T03:40:12.180Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-30908", "name": 
"some-name", "reconcileID": "126ac6f6-da1a-40ff-90d8-b51f8b2f7a14", "err": "get primary pxc pod: not found"} 2024-05-23T03:40:17.425Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "7c740184-7e21-40ab-ad96-7b31b79aa688", "err": "get primary pxc pod: not found"} 2024-05-23T03:40:22.600Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "b9f86d2a-0494-4d4c-904b-eb325d55e50d", "err": "get primary pxc pod: not found"} 2024-05-23T03:42:34.791Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "9cfbd0e6-871e-4d74-86b1-fc77a31880f6", "user": "root"} 2024-05-23T03:42:34.838Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "9cfbd0e6-871e-4d74-86b1-fc77a31880f6", "user": "replication"} 2024-05-23T03:42:35.030Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "9cfbd0e6-871e-4d74-86b1-fc77a31880f6", "new version": "5.7.44-48-57"} 2024-05-23T03:42:38.065Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "9cfbd0e6-871e-4d74-86b1-fc77a31880f6"} 2024-05-23T03:42:42.969Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "64a227e3-7190-404c-8253-ae25ac463541"} 2024-05-23T03:42:47.976Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "3b83b096-0892-4d27-9667-dfccf8b2c688"} 2024-05-23T03:42:53.544Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "a3534e53-ad3c-4264-bbfb-6c354ccc39f8"} 2024-05-23T03:42:58.551Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "3766cf80-8c9b-429b-a01d-828e1d8de549"} 2024-05-23T03:43:03.931Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "ab1bf90a-79b1-451c-b976-4a1f276f4622"} 2024-05-23T03:43:09.377Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "f590c627-5d8e-4896-9b95-7dbe78053d7e"} 2024-05-23T03:43:14.733Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "dd6453b2-5a07-4bda-934c-d201f62ed9a3"} 2024-05-23T03:43:20.031Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "5c9ddaea-d44f-48b7-8fce-74b1f6ba4ed3"} 2024-05-23T03:43:25.237Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "fc564323-8f02-481f-8f1c-56cbbb3ff237"} 2024-05-23T03:43:30.476Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "ef2b2129-6a18-4fea-9cc4-f9c64afa9b59"} 2024-05-23T03:43:35.739Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", 
"namespace": "users-30908", "name": "some-name", "reconcileID": "5becea8e-551b-4bda-81d3-8ad86b2c41d4"} 2024-05-23T03:43:37.909Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "5cd993eb-efc2-4ab4-9588-5355237b504d", "user": "root"} 2024-05-23T03:43:37.943Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "5cd993eb-efc2-4ab4-9588-5355237b504d", "user": "root"} 2024-05-23T03:43:37.952Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "5cd993eb-efc2-4ab4-9588-5355237b504d", "secret": "some-name-mysql-init", "user": "root"} 2024-05-23T03:43:42.897Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "5cd993eb-efc2-4ab4-9588-5355237b504d"} 2024-05-23T03:43:42.906Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "5cd993eb-efc2-4ab4-9588-5355237b504d", "user": "root"} 2024-05-23T03:43:46.044Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "5cd993eb-efc2-4ab4-9588-5355237b504d"} 2024-05-23T03:43:51.238Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "be49b04e-9e88-4da0-a915-24f82ad5c2bb"} 2024-05-23T03:43:56.943Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "b2062c3f-d7a4-43cc-be50-4024865fc3f3"} 2024-05-23T03:44:00.238Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "010031b9-9729-470d-8da6-66eeae19ecc4", "error": "exec syncusers: pod some-name-proxysql-2 does not have a host assigned / / ", "errorVerbose": "exec syncusers: pod some-name-proxysql-2 does not have a host assigned / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-23T03:44:19.446Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "cc8b5507-7453-44aa-a16c-99c4123b979b", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in 
ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-23T03:44:24.374Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "4196aa23-fcbb-45c7-bc53-626478b748f7", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-23T03:44:25.787Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "b0fcba99-9153-4600-9ec3-f0d17e0f8870", "user": "proxyadmin"} 2024-05-23T03:44:25.787Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "b0fcba99-9153-4600-9ec3-f0d17e0f8870", "user": "proxyadmin"} 2024-05-23T03:44:25.852Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "b0fcba99-9153-4600-9ec3-f0d17e0f8870", "user": "proxyadmin"} 2024-05-23T03:44:25.869Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "b0fcba99-9153-4600-9ec3-f0d17e0f8870", "user": "proxyadmin"} 2024-05-23T03:44:25.869Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "b0fcba99-9153-4600-9ec3-f0d17e0f8870", "last-applied-secret": "1a55867d0f4fa0d0643653b2eb57b2f690315d8d843a10599c9839c2a779a95f"} 2024-05-23T03:44:26.467Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "b0fcba99-9153-4600-9ec3-f0d17e0f8870", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-23T03:45:25.145Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "5c246831-3b7d-4d86-965d-c31d832bf8d9"} 2024-05-23T03:45:30.277Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "4d4a0776-6348-4aac-8848-52dea3622e52"} 2024-05-23T03:45:35.877Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "0cda0014-66d8-4df0-8c24-6b439697cc95"} 2024-05-23T03:45:44.080Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "6fff5e3f-69ec-4cb2-8221-f2c3bf587126", "user": "xtrabackup"} 2024-05-23T03:45:44.103Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "6fff5e3f-69ec-4cb2-8221-f2c3bf587126", "user": "xtrabackup"} 2024-05-23T03:45:44.126Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "6fff5e3f-69ec-4cb2-8221-f2c3bf587126", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-05-23T03:45:44.146Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "6fff5e3f-69ec-4cb2-8221-f2c3bf587126", "user": "xtrabackup"} 2024-05-23T03:45:44.146Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "6fff5e3f-69ec-4cb2-8221-f2c3bf587126", "last-applied-secret": "d11bcc70ca53ea410cd455e13d82e7225917b0861d9b677ed34c02c5f6fbaf1b"} 2024-05-23T03:45:49.370Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "6fff5e3f-69ec-4cb2-8221-f2c3bf587126"} 2024-05-23T03:47:41.918Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "1a9fa132-5730-4750-a856-5c0ecd059be0", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-30908 on 10.81.208.10:53: no such host"} 2024-05-23T03:47:46.998Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "f2093ab3-4d1a-4f94-9798-8c1daf0acb48", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-30908 on 10.81.208.10:53: no such host"} 2024-05-23T03:47:52.219Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "188d263e-2bc1-427e-af48-9661a9506ec7", "primary name": "some-name-pxc-0.some-name-pxc.users-30908.svc.cluster.local"} 2024-05-23T03:48:27.642Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "ebcadab4-0ded-4e30-adc7-cae3101c88f2"} 2024-05-23T03:48:32.241Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "e916e375-65e3-47d0-b2d7-760c1f05dd1c"} 2024-05-23T03:48:34.367Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "f3076c41-f8e8-45a3-be8b-338615a1297a", "user": "monitor"} 2024-05-23T03:48:34.389Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "f3076c41-f8e8-45a3-be8b-338615a1297a", "user": "monitor"} 2024-05-23T03:48:34.398Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "f3076c41-f8e8-45a3-be8b-338615a1297a", "secret": "some-name-mysql-init", "user": "monitor"} 2024-05-23T03:48:34.442Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "f3076c41-f8e8-45a3-be8b-338615a1297a", "user": "monitor"} 2024-05-23T03:48:34.454Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "f3076c41-f8e8-45a3-be8b-338615a1297a", "user": "monitor"} 2024-05-23T03:48:34.454Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "f3076c41-f8e8-45a3-be8b-338615a1297a", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"} 2024-05-23T03:48:37.127Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "f3076c41-f8e8-45a3-be8b-338615a1297a", "error": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-23T03:49:27.336Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "06e3bf5a-970a-4ea4-a987-cc2a56256165"} 2024-05-23T03:49:31.787Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "ed60bb41-e25f-4a41-a3c5-d93ebb444c03"} 2024-05-23T03:49:37.190Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "89c11f68-9177-4f06-84c3-fcdf14c116dc"} 2024-05-23T03:49:42.358Z DEBUG 
PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "f4447ccd-7075-46e8-b853-eb493d55ff28"} 2024-05-23T03:49:47.747Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "9b7ebdc3-3aa1-413f-9981-e70b62099a52"} 2024-05-23T03:49:53.056Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "9966b2a1-2c4d-41f2-8da4-be674c5c3c31"} 2024-05-23T03:49:58.471Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "e1e26aa5-457c-4c51-9145-1519ed4b2990"} 2024-05-23T03:50:00.190Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "7ff92dd1-014f-477e-b577-e230b6c4d4a7", "user": "operator"} 2024-05-23T03:50:00.254Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "7ff92dd1-014f-477e-b577-e230b6c4d4a7", "user": "operator"} 2024-05-23T03:50:00.266Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "7ff92dd1-014f-477e-b577-e230b6c4d4a7", "secret": "some-name-mysql-init", "user": "operator"} 2024-05-23T03:50:00.277Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "7ff92dd1-014f-477e-b577-e230b6c4d4a7", "user": "operator"} 2024-05-23T03:50:00.277Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "7ff92dd1-014f-477e-b577-e230b6c4d4a7", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-05-23T03:50:01.667Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "7ff92dd1-014f-477e-b577-e230b6c4d4a7", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-30908.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-30908.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-30908.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-30908.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-30908.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-30908.svc.' 
(using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-23T03:50:40.352Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "8a1cffab-dc25-42f6-abbf-48525f04f6a0", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 10.124.96.63:6032: i/o timeout"} 2024-05-23T03:50:45.614Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "4e582be6-6286-4415-a0be-d3041518c0c2"} 2024-05-23T03:50:54.355Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "59eb1b3b-0866-405b-a6cb-db419466ce6e"} 2024-05-23T03:51:00.141Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "908d256a-5b32-43e3-8f7c-a678d79941d6"} 2024-05-23T03:51:00.402Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "08cdf94a-edbf-45e7-a6d7-2a7e4b12bb2f", "secrets": "my-cluster-secrets-2"} 2024-05-23T03:51:00.402Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "08cdf94a-edbf-45e7-a6d7-2a7e4b12bb2f", "user": "root"} 2024-05-23T03:51:00.439Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "08cdf94a-edbf-45e7-a6d7-2a7e4b12bb2f", "user": "root"} 2024-05-23T03:51:00.455Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "08cdf94a-edbf-45e7-a6d7-2a7e4b12bb2f", "secret": "some-name-mysql-init", "user": "root"} 2024-05-23T03:51:05.775Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "08cdf94a-edbf-45e7-a6d7-2a7e4b12bb2f"} 2024-05-23T03:51:05.787Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "08cdf94a-edbf-45e7-a6d7-2a7e4b12bb2f", "user": "root"} 2024-05-23T03:51:05.787Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "08cdf94a-edbf-45e7-a6d7-2a7e4b12bb2f", "user": "operator"} 2024-05-23T03:51:05.810Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "08cdf94a-edbf-45e7-a6d7-2a7e4b12bb2f", "user": "operator"} 2024-05-23T03:51:05.823Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "08cdf94a-edbf-45e7-a6d7-2a7e4b12bb2f", "secret": "some-name-mysql-init", "user": "operator"} 2024-05-23T03:51:05.834Z INFO Internal secrets updated {"controller": 
"pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "08cdf94a-edbf-45e7-a6d7-2a7e4b12bb2f", "user": "operator"} 2024-05-23T03:51:05.835Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "08cdf94a-edbf-45e7-a6d7-2a7e4b12bb2f", "user": "monitor"} 2024-05-23T03:51:05.858Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "08cdf94a-edbf-45e7-a6d7-2a7e4b12bb2f", "user": "monitor"} 2024-05-23T03:51:05.872Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "08cdf94a-edbf-45e7-a6d7-2a7e4b12bb2f", "secret": "some-name-mysql-init", "user": "monitor"} 2024-05-23T03:51:05.914Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "08cdf94a-edbf-45e7-a6d7-2a7e4b12bb2f", "user": "monitor"} 2024-05-23T03:51:05.923Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "08cdf94a-edbf-45e7-a6d7-2a7e4b12bb2f", "user": "monitor"} 2024-05-23T03:51:05.923Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "08cdf94a-edbf-45e7-a6d7-2a7e4b12bb2f", "user": "xtrabackup"} 2024-05-23T03:51:05.945Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "08cdf94a-edbf-45e7-a6d7-2a7e4b12bb2f", "user": "xtrabackup"} 2024-05-23T03:51:05.953Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "08cdf94a-edbf-45e7-a6d7-2a7e4b12bb2f", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-05-23T03:51:05.966Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "08cdf94a-edbf-45e7-a6d7-2a7e4b12bb2f", "user": "xtrabackup"} 2024-05-23T03:51:05.966Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "08cdf94a-edbf-45e7-a6d7-2a7e4b12bb2f", "user": "replication"} 2024-05-23T03:51:05.988Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "08cdf94a-edbf-45e7-a6d7-2a7e4b12bb2f", "user": "replication"} 2024-05-23T03:51:05.997Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "08cdf94a-edbf-45e7-a6d7-2a7e4b12bb2f", "secret": "some-name-mysql-init", "user": "replication"} 2024-05-23T03:51:06.009Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "08cdf94a-edbf-45e7-a6d7-2a7e4b12bb2f", "user": "replication"} 2024-05-23T03:51:06.009Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "08cdf94a-edbf-45e7-a6d7-2a7e4b12bb2f", "user": "proxyadmin"} 2024-05-23T03:51:06.058Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "08cdf94a-edbf-45e7-a6d7-2a7e4b12bb2f", "user": "proxyadmin"} 2024-05-23T03:51:06.068Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": 
"users-30908", "name": "some-name", "reconcileID": "08cdf94a-edbf-45e7-a6d7-2a7e4b12bb2f", "user": "proxyadmin"} 2024-05-23T03:51:06.068Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "08cdf94a-edbf-45e7-a6d7-2a7e4b12bb2f", "last-applied-secret": "b5e5364fa69494365a26cd91c4091d8bb8685a78fe6650f526eab8fed621fa92"} 2024-05-23T03:51:06.068Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "08cdf94a-edbf-45e7-a6d7-2a7e4b12bb2f", "last-applied-secret": "b5e5364fa69494365a26cd91c4091d8bb8685a78fe6650f526eab8fed621fa92"} 2024-05-23T03:51:06.260Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "08cdf94a-edbf-45e7-a6d7-2a7e4b12bb2f", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-23T03:52:41.761Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "7627e4c9-d344-411d-a817-8fdf68617a71", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-30908 on 10.81.208.10:53: no such host"} 2024-05-23T03:52:42.056Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "2e0b7e9a-01f6-4a43-a894-3a3b1fabe78b", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-30908 on 10.81.208.10:53: no such host"} 2024-05-23T03:52:47.060Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "60aea7c8-b3c6-4a3d-85ef-d0178a4d031e", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-30908 on 10.81.208.10:53: no such host"} 2024-05-23T03:52:52.380Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "68e6a45b-4826-4d2b-beac-54d097c47128", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-30908 on 10.81.208.10:53: no such host"} 2024-05-23T03:52:57.632Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "72e23119-d5a4-4a85-b351-a905bfa4cbb5", "primary name": "some-name-pxc-0.some-name-pxc.users-30908.svc.cluster.local"} 2024-05-23T03:53:02.893Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "fa3421cd-40d5-41b8-b764-ba04f589b7e7", "primary name": "some-name-pxc-0.some-name-pxc.users-30908.svc.cluster.local"} 2024-05-23T03:53:08.066Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "6a75983f-a680-4b11-8d9a-0277ec7f7dc6", "primary name": "some-name-pxc-0.some-name-pxc.users-30908.svc.cluster.local"} 2024-05-23T03:53:13.328Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "987eb027-1c40-4d98-9889-a799af4223d8", "primary name": "some-name-pxc-0.some-name-pxc.users-30908.svc.cluster.local"} 2024-05-23T03:53:18.676Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "360dfd70-2756-4e17-a583-f4e8359079d0", "primary name": "some-name-pxc-0.some-name-pxc.users-30908.svc.cluster.local"} 2024-05-23T03:53:23.906Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "7c79e6fa-9eea-445e-aa4e-c21387628cca", "primary name": "some-name-pxc-0.some-name-pxc.users-30908.svc.cluster.local"} 2024-05-23T03:53:32.680Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "c71a1663-2a09-4502-89b6-7b279ea98d81"} 2024-05-23T03:53:37.636Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "5c08118e-a3b2-4a1b-ab33-5d21542b80bd"} 2024-05-23T03:53:43.256Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "648a7529-0883-42b7-b8cf-127d54481c3d"} 2024-05-23T03:53:48.534Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "d4644023-9df8-4831-8e4c-a450f7aaee04"} 2024-05-23T03:53:50.191Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "e25fc7db-c439-4050-b6af-2adf1275753f", "user": "operator"} 2024-05-23T03:53:50.218Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "e25fc7db-c439-4050-b6af-2adf1275753f", "user": "operator"} 2024-05-23T03:53:50.226Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "e25fc7db-c439-4050-b6af-2adf1275753f", "secret": "some-name-mysql-init", "user": "operator"} 2024-05-23T03:53:50.236Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "e25fc7db-c439-4050-b6af-2adf1275753f", "user": "operator"} 
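Aside, a hedged sketch rather than anything from the test helpers: the "Access denied for user 'operator'" syncusers errors above can be cross-checked by decoding the password currently stored for that user. The namespace and the my-cluster-secrets-2 name come from this log; the assumption that the secret exposes an 'operator' data key is mine.

# sketch: decode the current 'operator' password from the user secret (key name assumed)
kubectl -n users-30908 get secret my-cluster-secrets-2 -o 'jsonpath={.data.operator}' | base64 -d; echo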
2024-05-23T03:53:50.236Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "e25fc7db-c439-4050-b6af-2adf1275753f", "last-applied-secret": "d3c1b773fd370d9806e9b020be0e069f1decec7b51b62e25cafd94b317d0fad9"} 2024-05-23T03:53:51.716Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "e25fc7db-c439-4050-b6af-2adf1275753f", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-30908.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-30908.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-30908.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-30908.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-30908.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-30908.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-23T03:53:56.875Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "4ae88b54-14a1-477e-b9a2-89c5452f4f17", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-30908.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-30908.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-30908.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-30908.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-30908.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-30908.svc.' 
(using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-23T03:54:33.827Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "7242be0f-3bea-40e4-a04b-947dd861c1d6"} 2024-05-23T03:54:42.482Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "2203c7ea-ad2a-4c9d-9279-6fa3fdba1bdd"} 2024-05-23T03:54:47.741Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "aa68341e-d7c5-43e3-b04f-efb4d76a8a1e"} 2024-05-23T03:54:53.560Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "cf1be18c-7635-4dc3-8f7a-dc5b4ded26fc"} 2024-05-23T03:54:58.470Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "16de4a29-eb4e-444f-a9ed-fdf5752d09a4"} 2024-05-23T03:55:04.706Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "d3997bbb-7b68-4233-8736-9b953aadd5bf"} 2024-05-23T03:55:09.783Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "08891c07-0a76-4a4f-9942-a101003d896f"} 2024-05-23T03:55:14.454Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "2810af66-2886-4184-be0d-134849151fdd"} 2024-05-23T03:55:19.768Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "01a9c6f0-6ad7-47de-b95a-78d407dbcfc4"} 2024-05-23T03:55:24.888Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "f8998434-b6a2-4c85-8e75-0cc01d54ff5d"} 2024-05-23T03:55:30.284Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "cf9dba24-8da0-4b59-a862-36055aab4347"} 2024-05-23T03:55:35.480Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "7ea13f8a-934e-4d8f-9e48-7aacde4778a5"} 2024-05-23T03:55:40.752Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "cbb2682f-87c9-45bd-abfc-6ef583e83374"} 2024-05-23T03:55:45.955Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "2fc39c5a-414e-42c3-b926-d510a2151ca9"} 2024-05-23T03:55:51.241Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": 
"b7df4b7f-e627-409a-b86f-fa1edc349519"} 2024-05-23T03:55:53.152Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "97bb6ff8-51d3-4f36-a0ca-6990df7956b3", "user": "root"} 2024-05-23T03:55:53.189Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "97bb6ff8-51d3-4f36-a0ca-6990df7956b3", "user": "root"} 2024-05-23T03:55:53.197Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "97bb6ff8-51d3-4f36-a0ca-6990df7956b3", "secret": "some-name-mysql-init", "user": "root"} 2024-05-23T03:55:59.070Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "97bb6ff8-51d3-4f36-a0ca-6990df7956b3"} 2024-05-23T03:55:59.084Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "97bb6ff8-51d3-4f36-a0ca-6990df7956b3", "user": "root"} 2024-05-23T03:55:59.085Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "97bb6ff8-51d3-4f36-a0ca-6990df7956b3", "user": "monitor"} 2024-05-23T03:55:59.107Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "97bb6ff8-51d3-4f36-a0ca-6990df7956b3", "user": "monitor"} 2024-05-23T03:55:59.117Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "97bb6ff8-51d3-4f36-a0ca-6990df7956b3", "secret": "some-name-mysql-init", "user": "monitor"} 2024-05-23T03:55:59.161Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "97bb6ff8-51d3-4f36-a0ca-6990df7956b3", "user": "monitor"} 2024-05-23T03:55:59.175Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "97bb6ff8-51d3-4f36-a0ca-6990df7956b3", "user": "monitor"} 2024-05-23T03:55:59.175Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "97bb6ff8-51d3-4f36-a0ca-6990df7956b3", "user": "xtrabackup"} 2024-05-23T03:55:59.200Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "97bb6ff8-51d3-4f36-a0ca-6990df7956b3", "user": "xtrabackup"} 2024-05-23T03:55:59.209Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "97bb6ff8-51d3-4f36-a0ca-6990df7956b3", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-05-23T03:55:59.220Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "97bb6ff8-51d3-4f36-a0ca-6990df7956b3", "user": "xtrabackup"} 2024-05-23T03:55:59.220Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "97bb6ff8-51d3-4f36-a0ca-6990df7956b3", "user": "proxyadmin"} 2024-05-23T03:55:59.265Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "97bb6ff8-51d3-4f36-a0ca-6990df7956b3", "user": "proxyadmin"} 2024-05-23T03:55:59.280Z INFO 
Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "97bb6ff8-51d3-4f36-a0ca-6990df7956b3", "user": "proxyadmin"} 2024-05-23T03:55:59.281Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "97bb6ff8-51d3-4f36-a0ca-6990df7956b3", "last-applied-secret": "ded85a35c0154d622215df9d4a588bc941af71bddcb51dc50fd9f59e828a6786"} 2024-05-23T03:55:59.281Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "97bb6ff8-51d3-4f36-a0ca-6990df7956b3", "last-applied-secret": "ded85a35c0154d622215df9d4a588bc941af71bddcb51dc50fd9f59e828a6786"} 2024-05-23T03:55:59.542Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "97bb6ff8-51d3-4f36-a0ca-6990df7956b3", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-23T03:56:11.256Z INFO KubeAPIWarningLogger .metadata.ownerReferences contains duplicate entries; API server dedups owner references in 1.20+, and may reject such requests as early as 1.24; please fix your requests; duplicate UID(s) observed: 8e613134-acb0-48ba-96e8-32116f6fd894 2024-05-23T03:58:17.899Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "30decd77-f015-4c67-8e33-8f29736f858f", "user": "root"} 2024-05-23T03:58:17.940Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "30decd77-f015-4c67-8e33-8f29736f858f", "user": "root"} 2024-05-23T03:58:17.949Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "30decd77-f015-4c67-8e33-8f29736f858f", "secret": "some-name-mysql-init", "user": "root"} 2024-05-23T03:58:17.964Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "30decd77-f015-4c67-8e33-8f29736f858f", "user": "root"} 2024-05-23T03:58:17.964Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "30decd77-f015-4c67-8e33-8f29736f858f", "user": "operator"} 
2024-05-23T03:58:17.988Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "30decd77-f015-4c67-8e33-8f29736f858f", "user": "operator"}
2024-05-23T03:58:18.002Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "30decd77-f015-4c67-8e33-8f29736f858f", "secret": "some-name-mysql-init", "user": "operator"}
2024-05-23T03:58:18.011Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "30decd77-f015-4c67-8e33-8f29736f858f", "user": "operator"}
2024-05-23T03:58:18.012Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "30decd77-f015-4c67-8e33-8f29736f858f", "user": "monitor"}
2024-05-23T03:58:18.033Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "30decd77-f015-4c67-8e33-8f29736f858f", "user": "monitor"}
2024-05-23T03:58:18.046Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "30decd77-f015-4c67-8e33-8f29736f858f", "secret": "some-name-mysql-init", "user": "monitor"}
2024-05-23T03:58:18.059Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "30decd77-f015-4c67-8e33-8f29736f858f", "user": "monitor"}
2024-05-23T03:58:18.060Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "30decd77-f015-4c67-8e33-8f29736f858f", "user": "xtrabackup"}
2024-05-23T03:58:18.079Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "30decd77-f015-4c67-8e33-8f29736f858f", "user": "xtrabackup"}
2024-05-23T03:58:18.089Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "30decd77-f015-4c67-8e33-8f29736f858f", "secret": "some-name-mysql-init", "user": "xtrabackup"}
2024-05-23T03:58:18.102Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "30decd77-f015-4c67-8e33-8f29736f858f", "user": "xtrabackup"}
2024-05-23T03:58:18.102Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "30decd77-f015-4c67-8e33-8f29736f858f", "user": "replication"}
2024-05-23T03:58:18.121Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "30decd77-f015-4c67-8e33-8f29736f858f", "user": "replication"}
2024-05-23T03:58:18.129Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "30decd77-f015-4c67-8e33-8f29736f858f", "secret": "some-name-mysql-init", "user": "replication"}
2024-05-23T03:58:18.146Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "30decd77-f015-4c67-8e33-8f29736f858f", "user": "replication"}
2024-05-23T03:58:18.146Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "30decd77-f015-4c67-8e33-8f29736f858f", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"}
2024-05-23T03:58:18.146Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "30decd77-f015-4c67-8e33-8f29736f858f", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"}
2024-05-23T04:01:06.580Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "076eede0-3328-4d2c-ae0b-221c74a1dee5", "user": "monitor"}
2024-05-23T04:01:06.607Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "076eede0-3328-4d2c-ae0b-221c74a1dee5", "user": "monitor"}
2024-05-23T04:01:06.614Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "076eede0-3328-4d2c-ae0b-221c74a1dee5", "secret": "some-name-mysql-init", "user": "monitor"}
2024-05-23T04:01:06.627Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "076eede0-3328-4d2c-ae0b-221c74a1dee5", "user": "monitor"}
2024-05-23T04:01:06.627Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "076eede0-3328-4d2c-ae0b-221c74a1dee5", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"}
2024-05-23T04:01:15.160Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-30908", "name": "some-name", "reconcileID": "5b5d0586-69a8-4e90-b0a9-9bfaab99d7e7", "err": "get primary pxc pod: failed to get proxy connection: driver: bad connection"}
github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1
/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:222
/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:261
/go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:324
/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1248
[mysql] 2024/05/23 03:58:01 packets.go:37: read tcp 10.124.96.57:56914->10.81.210.7:3306: i/o timeout
[mysql] 2024/05/23 04:00:34 packets.go:37: read tcp 10.124.96.57:49018->10.81.210.7:3306: i/o timeout
[mysql] 2024/05/23 04:01:12 packets.go:37: unexpected EOF
[mysql] 2024/05/23 04:01:13 packets.go:37: unexpected EOF
[mysql] 2024/05/23 04:01:14 packets.go:37: unexpected EOF
[mysql] 2024/05/23 04:01:15 packets.go:37: unexpected EOF
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).processNextWorkItem
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).reconcileHandler
sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Start.func2.2
+ kubectl get pxc --all-namespaces -o wide
+ grep -v NAMESPACE
+ xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"'
+ kubectl patch pxc -n users-30908 some-name --type=merge -p '{"metadata":{"finalizers":[]}}'
perconaxtradbcluster.pxc.percona.com/some-name patched
+ kubectl_bin delete pxc --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.Qo6wnQpJgp
++ mktemp
+ local LAST_ERR=/tmp/tmp.zd4QKU5B9f
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.Qo6wnQpJgp
perconaxtradbcluster.pxc.percona.com "some-name" deleted
+ cat /tmp/tmp.zd4QKU5B9f
+ rm /tmp/tmp.Qo6wnQpJgp /tmp/tmp.zd4QKU5B9f
+ return 0
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.MWa9jplAge
++ mktemp
+ local LAST_ERR=/tmp/tmp.BvNqOjrdVT
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.MWa9jplAge
No resources found
+ cat /tmp/tmp.BvNqOjrdVT
+ rm /tmp/tmp.MWa9jplAge /tmp/tmp.BvNqOjrdVT
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.7NrP1glYWE
++ mktemp
+ local LAST_ERR=/tmp/tmp.vx8gzmK8Z5
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.7NrP1glYWE
No resources found
+ cat /tmp/tmp.vx8gzmK8Z5
+ rm /tmp/tmp.7NrP1glYWE /tmp/tmp.vx8gzmK8Z5
+ return 0
+ kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
++ mktemp
+ local LAST_OUT=/tmp/tmp.a3B50d64R3
++ mktemp
+ local LAST_ERR=/tmp/tmp.CXWl9aorAd
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.a3B50d64R3
validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted
+ cat /tmp/tmp.CXWl9aorAd
+ rm /tmp/tmp.a3B50d64R3 /tmp/tmp.CXWl9aorAd
+ return 0
+ kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml
+ :
+ '[' '!' -z '' ']'
+ '[' -n pxc-operator ']'
+ kubectl_bin delete --grace-period=0 --force=true namespace users-30908
+ rm -rf /tmp/tmp.7NBKv3QUJN
++ mktemp
+ kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator
++ mktemp
+ local LAST_OUT=/tmp/tmp.veUGtqo9iq
+ desc 'test passed'
+ set +o xtrace
-----------------------------------------------------------------------------------
test passed
-----------------------------------------------------------------------------------
++ mktemp
+ local LAST_OUT=/tmp/tmp.sdhJ4tp20Z
++ mktemp
+ local LAST_ERR=/tmp/tmp.u46iFiEdIQ
+ local exit_status=0
+ local LAST_ERR=/tmp/tmp.0qTNuDFJ7b
+ local exit_status=0
++ seq 0 2
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace users-30908
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace pxc-operator
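Editor's note: every kubectl_bin call traced above follows the same wrapper pattern: capture stdout and stderr to mktemp files, retry the kubectl command up to three times, print the captured output, and clean up. The helper itself is not printed in the log, so the following is only a reconstruction inferred from the trace (the retry count comes from the "seq 0 2" loop, the temp files from LAST_OUT/LAST_ERR), not the actual implementation in the e2e test suite; the sleep between retries is an added assumption.

# Hedged reconstruction of the retry wrapper implied by the trace above.
kubectl_bin() {
  local LAST_OUT LAST_ERR exit_status=0
  LAST_OUT=$(mktemp)
  LAST_ERR=$(mktemp)
  for i in $(seq 0 2); do            # up to three attempts, as in "seq 0 2"
    set +e
    kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
    exit_status=$?
    set -e
    [ $exit_status != 0 ] || break   # stop retrying once kubectl succeeds
    sleep 1                          # assumption: brief pause between attempts
  done
  cat "$LAST_OUT"
  cat "$LAST_ERR"
  rm "$LAST_OUT" "$LAST_ERR"
  return $exit_status
}

In the log it is invoked as, for example, kubectl_bin delete pxc --all --all-namespaces, which produces exactly the mktemp / seq 0 2 / cat / rm sequence seen in the trace.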