Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/logs/users-5-7.log WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 + create_infra users-21638 + local ns=users-21638 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + grep -v NAMESPACE + kubectl patch pxc -n users-6401 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.8fbt5y23ur ++ mktemp + local LAST_ERR=/tmp/tmp.m4PS62nzkj + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.8fbt5y23ur perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.m4PS62nzkj + rm /tmp/tmp.8fbt5y23ur /tmp/tmp.m4PS62nzkj + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.ti502x9gjQ ++ mktemp + local LAST_ERR=/tmp/tmp.kWhVqgevt2 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ti502x9gjQ No resources found + cat /tmp/tmp.kWhVqgevt2 + rm /tmp/tmp.ti502x9gjQ /tmp/tmp.kWhVqgevt2 + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.WbIviuUKXD ++ mktemp + local LAST_ERR=/tmp/tmp.DZ6VKiXQjZ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.WbIviuUKXD No resources found + cat /tmp/tmp.DZ6VKiXQjZ + rm /tmp/tmp.WbIviuUKXD /tmp/tmp.DZ6VKiXQjZ + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified 
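The repeated "error: resource(s) were provided, but no name was specified" messages above are expected noise from destroy_chaos_mesh: each "kubectl get | grep chaos-mesh | awk" pipeline matched nothing, so "kubectl delete" ran with an empty name list, failed, and the script swallowed the failure with ":". A minimal sketch of that cleanup pattern, reconstructed from this trace alone (the real helper in e2e-tests/functions also handles a chaos-mesh helm release, omitted here):

    destroy_chaos_mesh() {
        # Best-effort removal of cluster-scoped chaos-mesh leftovers.
        local kind names
        for kind in MutatingWebhookConfiguration ValidatingWebhookConfiguration crd clusterrolebinding clusterrole; do
            names=$(kubectl get "$kind" 2>/dev/null | grep chaos-mesh | awk '{print $1}')
            # With an empty name list, kubectl prints "error: resource(s) were
            # provided, but no name was specified" and exits non-zero, exactly
            # as seen throughout this log; the trailing ":" ignores that.
            timeout 30 kubectl delete "$kind" $names || :
        done
    }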
+ : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator + xargs kubectl delete ns + kubectl_bin get ns ++ mktemp + local LAST_OUT=/tmp/tmp.lAs0JnKaPq ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.bpNI0YTj1o + local exit_status=0 + local LAST_OUT=/tmp/tmp.mZl2lTynKR ++ seq 0 2 ++ mktemp + for i in '$(seq 0 2)' + set +e + kubectl get ns + local LAST_ERR=/tmp/tmp.WcvkwLkPeB + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lAs0JnKaPq + cat /tmp/tmp.bpNI0YTj1o + rm /tmp/tmp.lAs0JnKaPq /tmp/tmp.bpNI0YTj1o + return 0 namespace "users-6401" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.mZl2lTynKR namespace "pxc-operator" deleted + cat /tmp/tmp.WcvkwLkPeB + rm /tmp/tmp.mZl2lTynKR /tmp/tmp.WcvkwLkPeB + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'namespace/pxc-operator - ' namespace/pxc-operator - + set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.nX5nnl9DB1 ++ mktemp + local LAST_ERR=/tmp/tmp.vH1ouBuuU4 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.nX5nnl9DB1 namespace/pxc-operator created + cat /tmp/tmp.vH1ouBuuU4 + rm /tmp/tmp.nX5nnl9DB1 /tmp/tmp.vH1ouBuuU4 + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.DhqIFbOSX7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.KtoLRzMW6R ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DhqIFbOSX7 ++ cat /tmp/tmp.KtoLRzMW6R ++ rm /tmp/tmp.DhqIFbOSX7 /tmp/tmp.KtoLRzMW6R ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1715-225b38be-1-cluster8 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.vz2TiFVDKS ++ mktemp + local LAST_ERR=/tmp/tmp.E10cZrsDnZ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1715-225b38be-1-cluster8 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.vz2TiFVDKS Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1715-225b38be-1-cluster8" modified. 
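Almost every command in this log runs through the suite's kubectl_bin wrapper, which is why the trace keeps repeating the same mktemp / seq 0 2 / set +e / exit_status scaffolding. A minimal reconstruction of that retry wrapper, inferred purely from the trace (the actual helper may differ in details):

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0 i
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do                      # up to three attempts
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ "$exit_status" != 0 ]; then
                sleep 0                              # the trace shows a zero-second pause between retries
            else
                break
            fi
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR" >&2
        rm "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }

Capturing stdout and stderr into the LAST_OUT/LAST_ERR temp files is what lets the suite retry quietly and still print the final output, as in the 'namespace "pxc-operator" deleted' lines above.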
+ cat /tmp/tmp.E10cZrsDnZ + rm /tmp/tmp.vz2TiFVDKS /tmp/tmp.E10cZrsDnZ + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.Vbs6fWs4PE ++ mktemp + local LAST_ERR=/tmp/tmp.GNF5TTkOvS + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Vbs6fWs4PE customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.GNF5TTkOvS + rm /tmp/tmp.Vbs6fWs4PE /tmp/tmp.GNF5TTkOvS + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.SpIGM6RdaP ++ mktemp + local LAST_ERR=/tmp/tmp.qFh4hAzG6z + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.SpIGM6RdaP clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.qFh4hAzG6z + rm /tmp/tmp.SpIGM6RdaP /tmp/tmp.qFh4hAzG6z + return 0 + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-1715-225b38be^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/deploy/cw-operator.yaml + kubectl_bin apply -f - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - ++ mktemp + local LAST_OUT=/tmp/tmp.24IPYCXDvT ++ mktemp + local LAST_ERR=/tmp/tmp.ziID0Otiy7 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.24IPYCXDvT deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.ziID0Otiy7 + rm /tmp/tmp.24IPYCXDvT /tmp/tmp.ziID0Otiy7 + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.1T98a3EeJ0 ++ mktemp + local LAST_ERR=/tmp/tmp.TPz715xfFA + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1T98a3EeJ0 pod/percona-xtradb-cluster-operator-59b7fbbc57-wcwd6 condition met + cat /tmp/tmp.TPz715xfFA + rm /tmp/tmp.1T98a3EeJ0 /tmp/tmp.TPz715xfFA + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.EvvBwaMpcp +++ mktemp ++ local LAST_ERR=/tmp/tmp.a28bKX4xDA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EvvBwaMpcp ++ cat /tmp/tmp.a28bKX4xDA ++ rm /tmp/tmp.EvvBwaMpcp /tmp/tmp.a28bKX4xDA ++ return 0 + wait_pod percona-xtradb-cluster-operator-59b7fbbc57-wcwd6 480 pxc-operator + local pod=percona-xtradb-cluster-operator-59b7fbbc57-wcwd6 + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-59b7fbbc57-wcwd6 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-59b7fbbc57-wcwd6 condition met percona-xtradb-cluster-operator-59b7fbbc57-wcwd6.Ok + sleep 3 + create_namespace users-21638 + local namespace=users-21638 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ awk '{print $1}' ++ grep validate-auth + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v 
'^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces users-21638' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-21638 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-21638 ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.TJOtXlnXx6 + local LAST_OUT=/tmp/tmp.6oJxbCbStl ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.ZuywQ0NCcB + local exit_status=0 + local LAST_ERR=/tmp/tmp.oQyoirvuJW + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-21638 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.TJOtXlnXx6 + cat /tmp/tmp.oQyoirvuJW + rm /tmp/tmp.TJOtXlnXx6 /tmp/tmp.oQyoirvuJW + return 0 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-21638 Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-21638 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.6oJxbCbStl + cat /tmp/tmp.ZuywQ0NCcB Error from server (NotFound): namespaces "users-21638" not found + rm /tmp/tmp.6oJxbCbStl /tmp/tmp.ZuywQ0NCcB + return 1 + : + wait_for_delete namespace/users-21638 + local res=namespace/users-21638 + echo -n 'namespace/users-21638 - ' namespace/users-21638 - + set +o xtrace Error from server (NotFound): namespaces "users-21638" not found + desc 'create namespace users-21638' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-21638 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-21638 ++ mktemp + local LAST_OUT=/tmp/tmp.Vw4k2DJHIi ++ mktemp + local LAST_ERR=/tmp/tmp.aBG6iFLGoB + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-21638 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Vw4k2DJHIi namespace/users-21638 created + cat /tmp/tmp.aBG6iFLGoB + rm /tmp/tmp.Vw4k2DJHIi /tmp/tmp.aBG6iFLGoB + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.u5kllZpZcs +++ mktemp ++ local LAST_ERR=/tmp/tmp.ngIVkWFg8L ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.u5kllZpZcs ++ cat /tmp/tmp.ngIVkWFg8L ++ rm /tmp/tmp.u5kllZpZcs /tmp/tmp.ngIVkWFg8L ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1715-225b38be-1-cluster8 --namespace=users-21638 ++ mktemp + local LAST_OUT=/tmp/tmp.3CpD2aBHav ++ mktemp + local LAST_ERR=/tmp/tmp.91cWD7LRFM + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1715-225b38be-1-cluster8 --namespace=users-21638 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.3CpD2aBHav Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1715-225b38be-1-cluster8" modified. 
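The namespace churn above, sweeping stale test namespaces, retrying until the old one reports NotFound, then recreating it and re-pointing the kubeconfig context, is the create_namespace helper. A compact sketch of that flow under the same names (the body is an assumption reconstructed from the trace). Note that the '^default$' filter is applied to full "kubectl get ns" rows, which carry STATUS and AGE columns, so it never actually excludes the default namespace; that is why this run still shows 'Error from server (Forbidden): namespaces "default" is forbidden':

    create_namespace() {
        local namespace=$1
        # Sweep anything that is not a system namespace. '^default$' cannot match
        # a full "kubectl get ns" row, so the delete of "default" is attempted and
        # rejected by the API server, as seen above.
        kubectl get ns \
            | egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' \
            | awk '{print $1}' | xargs kubectl delete ns || :
        kubectl delete namespace "$namespace" || :        # tolerate "not found"
        until kubectl get namespace "$namespace" 2>&1 | grep -q NotFound; do
            sleep 1                                        # wait_for_delete equivalent
        done
        kubectl create namespace "$namespace"
        kubectl config set-context "$(kubectl config current-context)" --namespace="$namespace"
    }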
+ cat /tmp/tmp.91cWD7LRFM + rm /tmp/tmp.3CpD2aBHav /tmp/tmp.91cWD7LRFM + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.6ACd7yczrb ++ mktemp + local LAST_ERR=/tmp/tmp.gxAIOxVXka + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.6ACd7yczrb secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.gxAIOxVXka + rm /tmp/tmp.6ACd7yczrb /tmp/tmp.gxAIOxVXka + return 0 + desc 'create PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.dHTGs6gOsK ++ mktemp + local LAST_ERR=/tmp/tmp.2iqxYc7NSu + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dHTGs6gOsK secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.2iqxYc7NSu + rm /tmp/tmp.dHTGs6gOsK /tmp/tmp.2iqxYc7NSu + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/conf/client.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1715-225b38be#' + local LAST_OUT=/tmp/tmp.t3MRx2zwSV + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 
's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-21638~ + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' ++ mktemp + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + local LAST_ERR=/tmp/tmp.z1CVhNrQdk + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.t3MRx2zwSV deployment.apps/pxc-client created + cat /tmp/tmp.z1CVhNrQdk + rm /tmp/tmp.t3MRx2zwSV /tmp/tmp.z1CVhNrQdk + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p\-\c\l\o\u\d ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1715-225b38be#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-21638~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/conf/some-name.yml + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' ++ mktemp + local LAST_OUT=/tmp/tmp.gUERjZzKvZ ++ mktemp + local LAST_ERR=/tmp/tmp.RQdTLcetJ2 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.gUERjZzKvZ perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.RQdTLcetJ2 + rm /tmp/tmp.gUERjZzKvZ /tmp/tmp.RQdTLcetJ2 + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.wVn1hbuAep ++++ mktemp +++ local LAST_ERR=/tmp/tmp.YZQmPTdBvX +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' 
+++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.wVn1hbuAep +++ cat /tmp/tmp.YZQmPTdBvX +++ rm /tmp/tmp.wVn1hbuAep /tmp/tmp.YZQmPTdBvX +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.QYlnL1Pf6y ++++ mktemp +++ local LAST_ERR=/tmp/tmp.MoxJBTTPCU +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.QYlnL1Pf6y +++ cat /tmp/tmp.MoxJBTTPCU +++ rm /tmp/tmp.QYlnL1Pf6y /tmp/tmp.MoxJBTTPCU +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-21638 ++ mktemp + local LAST_OUT=/tmp/tmp.dgvlxheWQJ ++ mktemp + local LAST_ERR=/tmp/tmp.WoNGIDsfYl + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-21638 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-21638 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-21638 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.dgvlxheWQJ + cat /tmp/tmp.WoNGIDsfYl error: no matching resources found + rm /tmp/tmp.dgvlxheWQJ /tmp/tmp.WoNGIDsfYl + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met some-name-proxysql-0.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met some-name-pxc-0.Ok + for i 
in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met some-name-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met some-name-pxc-2.Ok + sleep 10 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7CWTpkC8ap +++ mktemp ++ local LAST_ERR=/tmp/tmp.Z3eVOjL12T ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7CWTpkC8ap ++ cat /tmp/tmp.Z3eVOjL12T Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.7CWTpkC8ap /tmp/tmp.Z3eVOjL12T ++ return 0 + [[ -n '' ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jBcgpsZX8p +++ mktemp ++ local LAST_ERR=/tmp/tmp.oniCMT5dfz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jBcgpsZX8p ++ cat /tmp/tmp.oniCMT5dfz ++ rm /tmp/tmp.jBcgpsZX8p /tmp/tmp.oniCMT5dfz ++ return 0 + client_pod=pxc-client-64b479df95-npn8s + wait_pod pxc-client-64b479df95-npn8s + local pod=pxc-client-64b479df95-npn8s + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-npn8s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-npn8s condition met pxc-client-64b479df95-npn8s.Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yDQ6luIRvc +++ mktemp ++ local LAST_ERR=/tmp/tmp.6yfJo1UmQi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' 
++ break ++ cat /tmp/tmp.yDQ6luIRvc ++ cat /tmp/tmp.6yfJo1UmQi ++ rm /tmp/tmp.yDQ6luIRvc /tmp/tmp.6yfJo1UmQi ++ return 0 + client_pod=pxc-client-64b479df95-npn8s + wait_pod pxc-client-64b479df95-npn8s + local pod=pxc-client-64b479df95-npn8s + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-npn8s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-npn8s condition met pxc-client-64b479df95-npn8s.Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fgtkE728JD +++ mktemp ++ local LAST_ERR=/tmp/tmp.2aX6K4lcc8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fgtkE728JD ++ cat /tmp/tmp.2aX6K4lcc8 ++ rm /tmp/tmp.fgtkE728JD /tmp/tmp.2aX6K4lcc8 ++ return 0 + client_pod=pxc-client-64b479df95-npn8s + wait_pod pxc-client-64b479df95-npn8s + local pod=pxc-client-64b479df95-npn8s + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-npn8s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-npn8s condition met pxc-client-64b479df95-npn8s.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.TjI7fh8Fme/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/compare/select-1.sql /tmp/tmp.TjI7fh8Fme/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.S61FitYzoF +++ mktemp ++ local LAST_ERR=/tmp/tmp.t4jd89pWE7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.S61FitYzoF ++ cat /tmp/tmp.t4jd89pWE7 ++ rm /tmp/tmp.S61FitYzoF /tmp/tmp.t4jd89pWE7 ++ return 0 + client_pod=pxc-client-64b479df95-npn8s + wait_pod pxc-client-64b479df95-npn8s + local pod=pxc-client-64b479df95-npn8s + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-npn8s ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-64b479df95-npn8s condition met pxc-client-64b479df95-npn8s.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.TjI7fh8Fme/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/compare/select-1.sql /tmp/tmp.TjI7fh8Fme/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.qUAvu46YR3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.GwiMv92Zon ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.qUAvu46YR3 ++ cat /tmp/tmp.GwiMv92Zon ++ rm /tmp/tmp.qUAvu46YR3 /tmp/tmp.GwiMv92Zon ++ return 0 + client_pod=pxc-client-64b479df95-npn8s + wait_pod pxc-client-64b479df95-npn8s + local pod=pxc-client-64b479df95-npn8s + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-npn8s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-npn8s condition met pxc-client-64b479df95-npn8s.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.TjI7fh8Fme/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/compare/select-1.sql /tmp/tmp.TjI7fh8Fme/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7IAgGu83wx +++ mktemp ++ local LAST_ERR=/tmp/tmp.PSdlfVjIb4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7IAgGu83wx ++ cat /tmp/tmp.PSdlfVjIb4 Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.7IAgGu83wx /tmp/tmp.PSdlfVjIb4 ++ return 0 + '[' '' ']' + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.1xja3l6BCO ++ mktemp + local LAST_ERR=/tmp/tmp.OxGiQS2ftZ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1xja3l6BCO secret/my-cluster-secrets patched + cat /tmp/tmp.OxGiQS2ftZ + rm /tmp/tmp.1xja3l6BCO /tmp/tmp.OxGiQS2ftZ + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lpG5s06iKi +++ mktemp ++ local LAST_ERR=/tmp/tmp.HcmkpIy2TB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lpG5s06iKi ++ cat /tmp/tmp.HcmkpIy2TB ++ rm /tmp/tmp.lpG5s06iKi /tmp/tmp.HcmkpIy2TB ++ return 0 + client_pod=pxc-client-64b479df95-npn8s + wait_pod pxc-client-64b479df95-npn8s + local pod=pxc-client-64b479df95-npn8s + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-npn8s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-npn8s condition met pxc-client-64b479df95-npn8s.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.TjI7fh8Fme/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/compare/select-4.sql /tmp/tmp.TjI7fh8Fme/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.5Ou11GZj7k ++ mktemp + local LAST_ERR=/tmp/tmp.sZW0gyeWLD + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.5Ou11GZj7k perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.sZW0gyeWLD + rm /tmp/tmp.5Ou11GZj7k /tmp/tmp.sZW0gyeWLD + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TaDjYBA2tD +++ mktemp ++ local LAST_ERR=/tmp/tmp.aTwbGVFVmg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TaDjYBA2tD ++ cat /tmp/tmp.aTwbGVFVmg ++ rm /tmp/tmp.TaDjYBA2tD /tmp/tmp.aTwbGVFVmg ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.6i8p6W8ZQV +++ mktemp ++ local LAST_ERR=/tmp/tmp.RcSUQUvrC1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.6i8p6W8ZQV ++ cat /tmp/tmp.RcSUQUvrC1 ++ rm /tmp/tmp.6i8p6W8ZQV /tmp/tmp.RcSUQUvrC1 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.wsfrx8IzR5 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.4qxoZObs0S +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.wsfrx8IzR5 +++++ cat /tmp/tmp.4qxoZObs0S +++++ rm /tmp/tmp.wsfrx8IzR5 /tmp/tmp.4qxoZObs0S +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.hFtKcujh4p ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.zyNBHmbtwA +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.hFtKcujh4p +++++ cat /tmp/tmp.zyNBHmbtwA +++++ rm /tmp/tmp.hFtKcujh4p /tmp/tmp.zyNBHmbtwA +++++ return 0 ++++ [[ true == 
\t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uC3SQKQ2h0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.gaxUdeIxLw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uC3SQKQ2h0 ++ cat /tmp/tmp.gaxUdeIxLw ++ rm /tmp/tmp.uC3SQKQ2h0 /tmp/tmp.gaxUdeIxLw ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.LIwYP2Kwiw ++ mktemp + local LAST_ERR=/tmp/tmp.L2MQxfJ0xf + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.LIwYP2Kwiw secret/my-cluster-secrets patched + cat /tmp/tmp.L2MQxfJ0xf + rm /tmp/tmp.LIwYP2Kwiw /tmp/tmp.L2MQxfJ0xf + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EedkICg08m +++ mktemp ++ local LAST_ERR=/tmp/tmp.MqmfvY8COz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EedkICg08m ++ cat /tmp/tmp.MqmfvY8COz ++ rm /tmp/tmp.EedkICg08m /tmp/tmp.MqmfvY8COz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NFjdVJkUcK +++ mktemp ++ local LAST_ERR=/tmp/tmp.J9t0t9hsHZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NFjdVJkUcK ++ cat /tmp/tmp.J9t0t9hsHZ ++ rm /tmp/tmp.NFjdVJkUcK /tmp/tmp.J9t0t9hsHZ ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HsFPbotwxL +++ mktemp ++ local LAST_ERR=/tmp/tmp.WlNKnfuUR9 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HsFPbotwxL ++ cat /tmp/tmp.WlNKnfuUR9 ++ rm /tmp/tmp.HsFPbotwxL /tmp/tmp.WlNKnfuUR9 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local 
LAST_OUT=/tmp/tmp.V2smoJWYIa ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.BuPns3xxSS +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.V2smoJWYIa +++++ cat /tmp/tmp.BuPns3xxSS +++++ rm /tmp/tmp.V2smoJWYIa /tmp/tmp.BuPns3xxSS +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.61vwtuVgmD ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.LUNV30q5cw +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.61vwtuVgmD +++++ cat /tmp/tmp.LUNV30q5cw +++++ rm /tmp/tmp.61vwtuVgmD /tmp/tmp.LUNV30q5cw +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4IVHypxLG6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.iKzI37mdcW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4IVHypxLG6 ++ cat /tmp/tmp.iKzI37mdcW ++ rm /tmp/tmp.4IVHypxLG6 /tmp/tmp.iKzI37mdcW ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.TjI7fh8Fme/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/compare/select-2.sql /tmp/tmp.TjI7fh8Fme/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.TjI7fh8Fme/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/compare/select-2.sql /tmp/tmp.TjI7fh8Fme/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.TjI7fh8Fme/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/compare/select-2.sql /tmp/tmp.TjI7fh8Fme/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.cPqVJDDOqs ++ mktemp + local LAST_ERR=/tmp/tmp.eyLAZ8KCWx + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.cPqVJDDOqs perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.eyLAZ8KCWx + rm /tmp/tmp.cPqVJDDOqs /tmp/tmp.eyLAZ8KCWx + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Bkyhtcz5NL ++ mktemp + local LAST_ERR=/tmp/tmp.zMvCxWTWQh + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Bkyhtcz5NL secret/my-cluster-secrets patched + cat /tmp/tmp.zMvCxWTWQh + rm /tmp/tmp.Bkyhtcz5NL /tmp/tmp.zMvCxWTWQh + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JevB41lsEm +++ mktemp ++ local LAST_ERR=/tmp/tmp.DuZcaY66Ie ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JevB41lsEm ++ cat /tmp/tmp.DuZcaY66Ie ++ rm /tmp/tmp.JevB41lsEm /tmp/tmp.DuZcaY66Ie ++ return 0 + [[ 
initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AvtJryWieW +++ mktemp ++ local LAST_ERR=/tmp/tmp.Re5dquwgFm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AvtJryWieW ++ cat /tmp/tmp.Re5dquwgFm ++ rm /tmp/tmp.AvtJryWieW /tmp/tmp.Re5dquwgFm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8U0vJNFWrB +++ mktemp ++ local LAST_ERR=/tmp/tmp.DgCq9XZzXe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8U0vJNFWrB ++ cat /tmp/tmp.DgCq9XZzXe ++ rm /tmp/tmp.8U0vJNFWrB /tmp/tmp.DgCq9XZzXe ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OKmkqElHE6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.W01cn84AFD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OKmkqElHE6 ++ cat /tmp/tmp.W01cn84AFD ++ rm /tmp/tmp.OKmkqElHE6 /tmp/tmp.W01cn84AFD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wDESY8zZCH +++ mktemp ++ local LAST_ERR=/tmp/tmp.eCLSOy7M4w ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wDESY8zZCH ++ cat /tmp/tmp.eCLSOy7M4w ++ rm /tmp/tmp.wDESY8zZCH /tmp/tmp.eCLSOy7M4w ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iuTRb1q324 +++ mktemp ++ local LAST_ERR=/tmp/tmp.eTuShmxzpv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iuTRb1q324 ++ cat /tmp/tmp.eTuShmxzpv ++ rm /tmp/tmp.iuTRb1q324 /tmp/tmp.eTuShmxzpv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.74leiwTLP3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.kTrugLP6XQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.74leiwTLP3 ++ cat /tmp/tmp.kTrugLP6XQ ++ rm 
/tmp/tmp.74leiwTLP3 /tmp/tmp.kTrugLP6XQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.N5kixzVgEw +++ mktemp ++ local LAST_ERR=/tmp/tmp.87z44Ere2S ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.N5kixzVgEw ++ cat /tmp/tmp.87z44Ere2S ++ rm /tmp/tmp.N5kixzVgEw /tmp/tmp.87z44Ere2S ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CJa2T9AoBn +++ mktemp ++ local LAST_ERR=/tmp/tmp.gHnm7Tv9CN ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CJa2T9AoBn ++ cat /tmp/tmp.gHnm7Tv9CN ++ rm /tmp/tmp.CJa2T9AoBn /tmp/tmp.gHnm7Tv9CN ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.rInMpRWAh2 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.x3RNKan5wp +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.rInMpRWAh2 +++++ cat /tmp/tmp.x3RNKan5wp +++++ rm /tmp/tmp.rInMpRWAh2 /tmp/tmp.x3RNKan5wp +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.5lAgAMPHqg ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.MrDCEQ3QM7 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.5lAgAMPHqg +++++ cat /tmp/tmp.MrDCEQ3QM7 +++++ rm /tmp/tmp.5lAgAMPHqg /tmp/tmp.MrDCEQ3QM7 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HmQdsb1jDc +++ mktemp ++ local LAST_ERR=/tmp/tmp.pmxVY9P21G ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HmQdsb1jDc ++ cat /tmp/tmp.pmxVY9P21G ++ rm /tmp/tmp.HmQdsb1jDc /tmp/tmp.pmxVY9P21G ++ return 0 + [[ 2 == \2 ]] + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 
-uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' -s /tmp/tmp.TjI7fh8Fme/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/compare/select-3.sql /tmp/tmp.TjI7fh8Fme/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.0211XENPVX ++ mktemp + local LAST_ERR=/tmp/tmp.68js7X6Ud0 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.0211XENPVX secret/my-cluster-secrets patched + cat /tmp/tmp.68js7X6Ud0 + rm /tmp/tmp.0211XENPVX /tmp/tmp.68js7X6Ud0 + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.7tSJiqS3Rv +++ mktemp ++ local LAST_ERR=/tmp/tmp.3FuaHBNNa4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7tSJiqS3Rv ++ cat /tmp/tmp.3FuaHBNNa4 ++ rm /tmp/tmp.7tSJiqS3Rv /tmp/tmp.3FuaHBNNa4 ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + echo 'Skipping dual password feature doesn'\''t work for 5.7. PXC 5.7 doesn'\''t support it!' Skipping dual password feature doesn't work for 5.7. PXC 5.7 doesn't support it! 
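# annotation: getSecretData above reads a credential straight back out of the
# Secret, and the dual-password check is then skipped because the image under
# test is PXC 5.7, which lacks that feature. The read-back pattern on its own:
root_pass=$(kubectl get secrets/my-cluster-secrets --template='{{.data.root}}' | base64 --decode)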
+ return + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.e31TUHk9P2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.08Sse7YE2r ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.e31TUHk9P2 ++ cat /tmp/tmp.08Sse7YE2r ++ rm /tmp/tmp.e31TUHk9P2 /tmp/tmp.08Sse7YE2r ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9yBUBz40Lt +++ mktemp ++ local LAST_ERR=/tmp/tmp.VzqWHCJ8Wu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9yBUBz40Lt ++ cat /tmp/tmp.VzqWHCJ8Wu ++ rm /tmp/tmp.9yBUBz40Lt /tmp/tmp.VzqWHCJ8Wu ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ipNesGDgZP +++ mktemp ++ local LAST_ERR=/tmp/tmp.INe7GxV3Ki ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ipNesGDgZP ++ cat /tmp/tmp.INe7GxV3Ki ++ rm /tmp/tmp.ipNesGDgZP /tmp/tmp.INe7GxV3Ki ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.px8kvJuYZp +++ mktemp ++ local LAST_ERR=/tmp/tmp.brTJQ6AhvF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.px8kvJuYZp ++ cat /tmp/tmp.brTJQ6AhvF ++ rm /tmp/tmp.px8kvJuYZp /tmp/tmp.brTJQ6AhvF ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.bUNLaBMfkF ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.kw57mIka1l +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.bUNLaBMfkF +++++ cat /tmp/tmp.kw57mIka1l +++++ rm /tmp/tmp.bUNLaBMfkF /tmp/tmp.kw57mIka1l +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.UHNxRYxeKB ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.arohaFysWG +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get 
pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.UHNxRYxeKB +++++ cat /tmp/tmp.arohaFysWG +++++ rm /tmp/tmp.UHNxRYxeKB /tmp/tmp.arohaFysWG +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KtiBZZuRRT +++ mktemp ++ local LAST_ERR=/tmp/tmp.wUdOZovkDH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KtiBZZuRRT ++ cat /tmp/tmp.wUdOZovkDH ++ rm /tmp/tmp.KtiBZZuRRT /tmp/tmp.wUdOZovkDH ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cBdg1EN4JM +++ mktemp ++ local LAST_ERR=/tmp/tmp.YoiF3JRpql ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cBdg1EN4JM ++ cat /tmp/tmp.YoiF3JRpql ++ rm /tmp/tmp.cBdg1EN4JM /tmp/tmp.YoiF3JRpql ++ return 0 + client_pod=pxc-client-64b479df95-npn8s + wait_pod pxc-client-64b479df95-npn8s + local pod=pxc-client-64b479df95-npn8s + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-npn8s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-npn8s condition met pxc-client-64b479df95-npn8s.Ok + set +o xtrace + '[' '!' 
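# annotation: compare_mysql_cmd above runs the statement from the pxc-client
# deployment and diffs the output against a golden file. A roughly equivalent
# one-liner (exec'ing via the Deployment is an assumption for brevity; the
# suite resolves a concrete pod name first):
kubectl exec deploy/pxc-client -- \
    mysql -h some-name-proxysql -umonitor -p'test-password' -e 'SHOW TABLES;'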
-s /tmp/tmp.TjI7fh8Fme/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/compare/select-4.sql /tmp/tmp.TjI7fh8Fme/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.ZnsE5WTjut ++ mktemp + local LAST_ERR=/tmp/tmp.igcImhrF47 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ZnsE5WTjut secret/my-cluster-secrets patched + cat /tmp/tmp.igcImhrF47 + rm /tmp/tmp.ZnsE5WTjut /tmp/tmp.igcImhrF47 + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZJ56qanFpY +++ mktemp ++ local LAST_ERR=/tmp/tmp.x2XPJJRtMv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZJ56qanFpY ++ cat /tmp/tmp.x2XPJJRtMv ++ rm /tmp/tmp.ZJ56qanFpY /tmp/tmp.x2XPJJRtMv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.P3JyTMXAbN +++ mktemp ++ local LAST_ERR=/tmp/tmp.pGJe7FQhbF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.P3JyTMXAbN ++ cat /tmp/tmp.pGJe7FQhbF ++ rm /tmp/tmp.P3JyTMXAbN /tmp/tmp.pGJe7FQhbF ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JvY7DST7PV +++ mktemp ++ local LAST_ERR=/tmp/tmp.PfcJekb761 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JvY7DST7PV ++ cat /tmp/tmp.PfcJekb761 ++ rm /tmp/tmp.JvY7DST7PV /tmp/tmp.PfcJekb761 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.6bqmWFHZyk ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.H6AciwLyl5 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ 
exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.6bqmWFHZyk +++++ cat /tmp/tmp.H6AciwLyl5 +++++ rm /tmp/tmp.6bqmWFHZyk /tmp/tmp.H6AciwLyl5 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.huvmJFhKxl ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.d8oQZkIcZV +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.huvmJFhKxl +++++ cat /tmp/tmp.d8oQZkIcZV +++++ rm /tmp/tmp.huvmJFhKxl /tmp/tmp.d8oQZkIcZV +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dkEmaXz9mF +++ mktemp ++ local LAST_ERR=/tmp/tmp.06ifIa7yBm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dkEmaXz9mF ++ cat /tmp/tmp.06ifIa7yBm ++ rm /tmp/tmp.dkEmaXz9mF /tmp/tmp.06ifIa7yBm ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.UAStfnjVyx +++ mktemp ++ local LAST_ERR=/tmp/tmp.y1isLgTAo2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.UAStfnjVyx ++ cat /tmp/tmp.y1isLgTAo2 ++ rm /tmp/tmp.UAStfnjVyx /tmp/tmp.y1isLgTAo2 ++ return 0 + client_pod=pxc-client-64b479df95-npn8s + wait_pod pxc-client-64b479df95-npn8s + local pod=pxc-client-64b479df95-npn8s + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-npn8s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-npn8s condition met pxc-client-64b479df95-npn8s.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.TjI7fh8Fme/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/compare/select-4.sql /tmp/tmp.TjI7fh8Fme/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.Fwzqd3NeLz ++ mktemp + local LAST_ERR=/tmp/tmp.E2FRaRPpfL + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Fwzqd3NeLz perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.E2FRaRPpfL + rm /tmp/tmp.Fwzqd3NeLz /tmp/tmp.E2FRaRPpfL + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gbt893MotN +++ mktemp ++ local LAST_ERR=/tmp/tmp.CDR8MWOmvI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gbt893MotN ++ cat /tmp/tmp.CDR8MWOmvI ++ rm /tmp/tmp.gbt893MotN /tmp/tmp.CDR8MWOmvI ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.bShZ9lk3YE +++ mktemp ++ local LAST_ERR=/tmp/tmp.1UUucfQeKl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.bShZ9lk3YE ++ cat /tmp/tmp.1UUucfQeKl ++ rm /tmp/tmp.bShZ9lk3YE /tmp/tmp.1UUucfQeKl ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0SlYleLSyy +++ mktemp ++ local LAST_ERR=/tmp/tmp.wGnaxZvBuL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0SlYleLSyy ++ cat /tmp/tmp.wGnaxZvBuL ++ rm /tmp/tmp.0SlYleLSyy /tmp/tmp.wGnaxZvBuL ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rBjFS5aLbx +++ mktemp ++ local LAST_ERR=/tmp/tmp.r5QtQfphoP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break 
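# annotation: "change secret name" above repoints the whole cluster at a second
# Secret object; the operator then reconciles every system user against the new
# credentials, which is why a full readiness wait follows. The patch on its own:
kubectl patch pxc some-name --type=merge \
    --patch '{"spec":{"secretsName":"my-cluster-secrets-2"}}'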
++ cat /tmp/tmp.rBjFS5aLbx ++ cat /tmp/tmp.r5QtQfphoP ++ rm /tmp/tmp.rBjFS5aLbx /tmp/tmp.r5QtQfphoP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.l3XURIlFen +++ mktemp ++ local LAST_ERR=/tmp/tmp.GSTB9QxYt6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.l3XURIlFen ++ cat /tmp/tmp.GSTB9QxYt6 ++ rm /tmp/tmp.l3XURIlFen /tmp/tmp.GSTB9QxYt6 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.sSAc6n3N9e +++ mktemp ++ local LAST_ERR=/tmp/tmp.cyocWuUAkG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sSAc6n3N9e ++ cat /tmp/tmp.cyocWuUAkG ++ rm /tmp/tmp.sSAc6n3N9e /tmp/tmp.cyocWuUAkG ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Yfab8osMrg +++ mktemp ++ local LAST_ERR=/tmp/tmp.bPxjC9Cymy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Yfab8osMrg ++ cat /tmp/tmp.bPxjC9Cymy ++ rm /tmp/tmp.Yfab8osMrg /tmp/tmp.bPxjC9Cymy ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kKEkxyS5iK +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZfSuYnYhPH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kKEkxyS5iK ++ cat /tmp/tmp.ZfSuYnYhPH ++ rm /tmp/tmp.kKEkxyS5iK /tmp/tmp.ZfSuYnYhPH ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.FPM8nuM7BX +++ mktemp ++ local LAST_ERR=/tmp/tmp.TN3eYxls5K ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.FPM8nuM7BX ++ cat /tmp/tmp.TN3eYxls5K ++ rm /tmp/tmp.FPM8nuM7BX /tmp/tmp.TN3eYxls5K ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.7xFjjwoGW7 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.osq5kOYP94 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break 
+++++ cat /tmp/tmp.7xFjjwoGW7 +++++ cat /tmp/tmp.osq5kOYP94 +++++ rm /tmp/tmp.7xFjjwoGW7 /tmp/tmp.osq5kOYP94 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.oVXDiIv3gI ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.64Ddd6oV2f +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.oVXDiIv3gI +++++ cat /tmp/tmp.64Ddd6oV2f +++++ rm /tmp/tmp.oVXDiIv3gI /tmp/tmp.64Ddd6oV2f +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pFsU8OX15n +++ mktemp ++ local LAST_ERR=/tmp/tmp.FKFqdBp1NA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pFsU8OX15n ++ cat /tmp/tmp.FKFqdBp1NA ++ rm /tmp/tmp.pFsU8OX15n /tmp/tmp.FKFqdBp1NA ++ return 0 + [[ 2 == \2 ]] + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.nLh7pg2fEk ++ mktemp + local LAST_ERR=/tmp/tmp.Kh0HsHlMAM + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.nLh7pg2fEk secret/my-cluster-secrets-2 patched + cat /tmp/tmp.Kh0HsHlMAM + rm /tmp/tmp.nLh7pg2fEk /tmp/tmp.Kh0HsHlMAM + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.crHvhIfoTC +++ mktemp ++ local LAST_ERR=/tmp/tmp.YNbtHWMo4a ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.crHvhIfoTC ++ cat /tmp/tmp.YNbtHWMo4a ++ rm /tmp/tmp.crHvhIfoTC /tmp/tmp.YNbtHWMo4a ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cknv9Xghqf +++ mktemp ++ local LAST_ERR=/tmp/tmp.7VkoyzKyhj ++ local exit_status=0 +++ seq 
0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cknv9Xghqf ++ cat /tmp/tmp.7VkoyzKyhj ++ rm /tmp/tmp.cknv9Xghqf /tmp/tmp.7VkoyzKyhj ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xExBlOwZcD +++ mktemp ++ local LAST_ERR=/tmp/tmp.7Z5avN6hea ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xExBlOwZcD ++ cat /tmp/tmp.7Z5avN6hea ++ rm /tmp/tmp.xExBlOwZcD /tmp/tmp.7Z5avN6hea ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MxS80VW9wp +++ mktemp ++ local LAST_ERR=/tmp/tmp.lUg2FtruFI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MxS80VW9wp ++ cat /tmp/tmp.lUg2FtruFI ++ rm /tmp/tmp.MxS80VW9wp /tmp/tmp.lUg2FtruFI ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.XX8jtdB2rK ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.wCWsicXw32 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.XX8jtdB2rK +++++ cat /tmp/tmp.wCWsicXw32 +++++ rm /tmp/tmp.XX8jtdB2rK /tmp/tmp.wCWsicXw32 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.VIJGBC0nZp ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.mpJrMF4bvJ +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.VIJGBC0nZp +++++ cat /tmp/tmp.mpJrMF4bvJ +++++ rm /tmp/tmp.VIJGBC0nZp /tmp/tmp.mpJrMF4bvJ +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Ggrjrcg0hP +++ mktemp ++ local LAST_ERR=/tmp/tmp.dJXiJuhRqo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Ggrjrcg0hP ++ cat /tmp/tmp.dJXiJuhRqo ++ rm /tmp/tmp.Ggrjrcg0hP /tmp/tmp.dJXiJuhRqo ++ return 0 + [[ 2 == \2 ]] + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local 
expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EhA41aEVWG +++ mktemp ++ local LAST_ERR=/tmp/tmp.ev6INa2nJY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EhA41aEVWG ++ cat /tmp/tmp.ev6INa2nJY ++ rm /tmp/tmp.EhA41aEVWG /tmp/tmp.ev6INa2nJY ++ return 0 + client_pod=pxc-client-64b479df95-npn8s + wait_pod pxc-client-64b479df95-npn8s + local pod=pxc-client-64b479df95-npn8s + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-npn8s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-npn8s condition met pxc-client-64b479df95-npn8s.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.TjI7fh8Fme/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/compare/select-4.sql /tmp/tmp.TjI7fh8Fme/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.IlQGuktRyl +++ mktemp ++ local LAST_ERR=/tmp/tmp.AtfXhnBjy6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IlQGuktRyl ++ cat /tmp/tmp.AtfXhnBjy6 ++ rm /tmp/tmp.IlQGuktRyl /tmp/tmp.AtfXhnBjy6 ++ return 0 + newpass='%~dpy=^%f7r%0],wB' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''%~dpy=^%f7r%0],wB'\'';' '-h some-name-pxc -uroot -p'\''%~dpy=^%f7r%0],wB'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''%~dpy=^%f7r%0],wB'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''%~dpy=^%f7r%0],wB'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cs5uChjZyO +++ mktemp ++ local LAST_ERR=/tmp/tmp.CxCj6SMKU6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cs5uChjZyO ++ cat /tmp/tmp.CxCj6SMKU6 ++ rm /tmp/tmp.cs5uChjZyO /tmp/tmp.CxCj6SMKU6 ++ return 0 + client_pod=pxc-client-64b479df95-npn8s + wait_pod pxc-client-64b479df95-npn8s + local pod=pxc-client-64b479df95-npn8s + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-npn8s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-npn8s condition met 
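# annotation: "test new users sync" above creates a user directly in PXC with
# the root password just read back from the Secret, then (next line) sleeps 40 s
# so the operator's ProxySQL user sync can pick the account up before the
# SHOW TABLES probe runs as that user. Sketch, with the password held in a
# variable instead of inlined (the variable name is illustrative):
kubectl exec deploy/pxc-client -- mysql -h some-name-pxc -uroot -p"$root_pass" \
    -e "CREATE USER 'testsync'@'%' IDENTIFIED BY '$root_pass';"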
pxc-client-64b479df95-npn8s.Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''%~dpy=^%f7r%0],wB'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''%~dpy=^%f7r%0],wB'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''%~dpy=^%f7r%0],wB'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''%~dpy=^%f7r%0],wB'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iq1gyx9Sz1 +++ mktemp ++ local LAST_ERR=/tmp/tmp.Hf42UqpjAS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iq1gyx9Sz1 ++ cat /tmp/tmp.Hf42UqpjAS ++ rm /tmp/tmp.iq1gyx9Sz1 /tmp/tmp.Hf42UqpjAS ++ return 0 + client_pod=pxc-client-64b479df95-npn8s + wait_pod pxc-client-64b479df95-npn8s + local pod=pxc-client-64b479df95-npn8s + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-npn8s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-npn8s condition met pxc-client-64b479df95-npn8s.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.TjI7fh8Fme/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/compare/select-4.sql /tmp/tmp.TjI7fh8Fme/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.KHUQgvp3Cg +++ mktemp ++ local LAST_ERR=/tmp/tmp.kabOAaSw2P ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KHUQgvp3Cg ++ cat /tmp/tmp.kabOAaSw2P ++ rm /tmp/tmp.KHUQgvp3Cg /tmp/tmp.kabOAaSw2P ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.RzlM47LrER ++ mktemp + local LAST_ERR=/tmp/tmp.ISZGt4TMhK + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.RzlM47LrER secret/my-cluster-secrets-2 configured + cat /tmp/tmp.ISZGt4TMhK Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
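# annotation: the Warning above is benign -- the Secret was not created
# declaratively, so it lacks the last-applied-configuration annotation, and
# kubectl apply patches it in automatically on first use. Creating the object
# with --save-config in the first place would avoid the message, e.g.:
kubectl create -f e2e-tests/users/conf/secrets.yml --save-config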
+ rm /tmp/tmp.RzlM47LrER /tmp/tmp.ISZGt4TMhK + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.T6YbNePTfN +++ mktemp ++ local LAST_ERR=/tmp/tmp.ZMrSjM3NSr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.T6YbNePTfN ++ cat /tmp/tmp.ZMrSjM3NSr ++ rm /tmp/tmp.T6YbNePTfN /tmp/tmp.ZMrSjM3NSr ++ return 0 + client_pod=pxc-client-64b479df95-npn8s + wait_pod pxc-client-64b479df95-npn8s + local pod=pxc-client-64b479df95-npn8s + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-npn8s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-npn8s condition met pxc-client-64b479df95-npn8s.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.TjI7fh8Fme/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/compare/select-4.sql /tmp/tmp.TjI7fh8Fme/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + local LAST_OUT=/tmp/tmp.YMZl3nEVtt + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-21638~ + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1715-225b38be#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' ++ mktemp + local LAST_ERR=/tmp/tmp.xMpXoQSSBN + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.YMZl3nEVtt 
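# annotation: apply_config/cat_config above pipe the CR manifest through a
# chain of sed rewrites that pin apiVersion and every image to the builds under
# test before it ever reaches kubectl. Condensed to two representative rewrites
# (the full substitution list is visible in the trace):
sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' \
    -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' \
    e2e-tests/users/conf/some-name.yml | kubectl apply -f -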
perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.xMpXoQSSBN + rm /tmp/tmp.YMZl3nEVtt /tmp/tmp.xMpXoQSSBN + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.70LvhV0pl0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.sIuRj0tM73 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.70LvhV0pl0 ++ cat /tmp/tmp.sIuRj0tM73 ++ rm /tmp/tmp.70LvhV0pl0 /tmp/tmp.sIuRj0tM73 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QlCcCY1NB8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.x9TJiW9xlz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QlCcCY1NB8 ++ cat /tmp/tmp.x9TJiW9xlz ++ rm /tmp/tmp.QlCcCY1NB8 /tmp/tmp.x9TJiW9xlz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TcVrUbv1dw +++ mktemp ++ local LAST_ERR=/tmp/tmp.30FoBU5vWm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TcVrUbv1dw ++ cat /tmp/tmp.30FoBU5vWm ++ rm /tmp/tmp.TcVrUbv1dw /tmp/tmp.30FoBU5vWm ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hwkQObIjD8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.4q9rc1Q5Hl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hwkQObIjD8 ++ cat /tmp/tmp.4q9rc1Q5Hl ++ rm /tmp/tmp.hwkQObIjD8 /tmp/tmp.4q9rc1Q5Hl ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.VVZCqJSHpw +++ mktemp ++ local LAST_ERR=/tmp/tmp.y3XlSpG15U ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.VVZCqJSHpw ++ cat /tmp/tmp.y3XlSpG15U ++ rm /tmp/tmp.VVZCqJSHpw /tmp/tmp.y3XlSpG15U ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 
]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wMjB2DnIUK +++ mktemp ++ local LAST_ERR=/tmp/tmp.ARhHpK5ZQ7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wMjB2DnIUK ++ cat /tmp/tmp.ARhHpK5ZQ7 ++ rm /tmp/tmp.wMjB2DnIUK /tmp/tmp.ARhHpK5ZQ7 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4nM2TanCZk +++ mktemp ++ local LAST_ERR=/tmp/tmp.H0NpSzwXi8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4nM2TanCZk ++ cat /tmp/tmp.H0NpSzwXi8 ++ rm /tmp/tmp.4nM2TanCZk /tmp/tmp.H0NpSzwXi8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LQSW1bhJhk +++ mktemp ++ local LAST_ERR=/tmp/tmp.E7tFVyZ362 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LQSW1bhJhk ++ cat /tmp/tmp.E7tFVyZ362 ++ rm /tmp/tmp.LQSW1bhJhk /tmp/tmp.E7tFVyZ362 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 7 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hiZHW9JOMh +++ mktemp ++ local LAST_ERR=/tmp/tmp.59TdZGStIB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hiZHW9JOMh ++ cat /tmp/tmp.59TdZGStIB ++ rm /tmp/tmp.hiZHW9JOMh /tmp/tmp.59TdZGStIB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 8 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ifNYLD5viD +++ mktemp ++ local LAST_ERR=/tmp/tmp.MJ4slNYKlq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ifNYLD5viD ++ cat /tmp/tmp.MJ4slNYKlq ++ rm /tmp/tmp.ifNYLD5viD /tmp/tmp.MJ4slNYKlq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 9 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cObMpMy9MJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.ci2v4TdHxY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cObMpMy9MJ ++ cat /tmp/tmp.ci2v4TdHxY ++ rm /tmp/tmp.cObMpMy9MJ /tmp/tmp.ci2v4TdHxY ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' 
waiting for cluster readyness + sleep 20 + [[ 10 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Px2fQB4mFw +++ mktemp ++ local LAST_ERR=/tmp/tmp.jCRcanDiff ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Px2fQB4mFw ++ cat /tmp/tmp.jCRcanDiff ++ rm /tmp/tmp.Px2fQB4mFw /tmp/tmp.jCRcanDiff ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 11 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xnr5pNuCB8 +++ mktemp ++ local LAST_ERR=/tmp/tmp.uMydLwbaeB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xnr5pNuCB8 ++ cat /tmp/tmp.uMydLwbaeB ++ rm /tmp/tmp.xnr5pNuCB8 /tmp/tmp.uMydLwbaeB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 12 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uSdsz9W6pX +++ mktemp ++ local LAST_ERR=/tmp/tmp.tbCuk1EQsq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uSdsz9W6pX ++ cat /tmp/tmp.tbCuk1EQsq ++ rm /tmp/tmp.uSdsz9W6pX /tmp/tmp.tbCuk1EQsq ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SXv1V9u9hX +++ mktemp ++ local LAST_ERR=/tmp/tmp.evr2jMuYsV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SXv1V9u9hX ++ cat /tmp/tmp.evr2jMuYsV ++ rm /tmp/tmp.SXv1V9u9hX /tmp/tmp.evr2jMuYsV ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.CnNaqvkmHL ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.kJJvUDZQOh +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.CnNaqvkmHL +++++ cat /tmp/tmp.kJJvUDZQOh +++++ rm /tmp/tmp.CnNaqvkmHL /tmp/tmp.kJJvUDZQOh +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RoSPeLwR4K +++ mktemp ++ local LAST_ERR=/tmp/tmp.P9chDo0c8E ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RoSPeLwR4K ++ cat /tmp/tmp.P9chDo0c8E ++ rm /tmp/tmp.RoSPeLwR4K /tmp/tmp.P9chDo0c8E ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets monitor 
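# annotation: note the readiness check has switched targets -- the re-applied
# CR enables HAProxy, so get_proxy_engine now resolves to haproxy and the wait
# is judged on .status.haproxy.ready (3/3) instead of .status.proxysql.ready.
# The engine detection reduces to:
if [[ $(kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}') == true ]]; then
    echo haproxy
else
    echo proxysql
fi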
dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.TV526uyncO ++ mktemp + local LAST_ERR=/tmp/tmp.aKrUz5rzZx + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.TV526uyncO secret/my-cluster-secrets patched + cat /tmp/tmp.aKrUz5rzZx + rm /tmp/tmp.TV526uyncO /tmp/tmp.aKrUz5rzZx + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cplXFnp7At +++ mktemp ++ local LAST_ERR=/tmp/tmp.sexFW70kGD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cplXFnp7At ++ cat /tmp/tmp.sexFW70kGD ++ rm /tmp/tmp.cplXFnp7At /tmp/tmp.sexFW70kGD ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yg5ut9Up4k +++ mktemp ++ local LAST_ERR=/tmp/tmp.nIkOjZPGzc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yg5ut9Up4k ++ cat /tmp/tmp.nIkOjZPGzc ++ rm /tmp/tmp.yg5ut9Up4k /tmp/tmp.nIkOjZPGzc ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uqHWHkJFPh +++ mktemp ++ local LAST_ERR=/tmp/tmp.e7O4BaLUy4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uqHWHkJFPh ++ cat /tmp/tmp.e7O4BaLUy4 ++ rm /tmp/tmp.uqHWHkJFPh /tmp/tmp.e7O4BaLUy4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.HYRuyp41fb +++ mktemp ++ local LAST_ERR=/tmp/tmp.mvZr97Daq2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HYRuyp41fb ++ cat /tmp/tmp.mvZr97Daq2 ++ rm /tmp/tmp.HYRuyp41fb /tmp/tmp.mvZr97Daq2 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gvQu4N4ySw +++ mktemp ++ local LAST_ERR=/tmp/tmp.Cy612SCHxS ++ local exit_status=0 +++ 
seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gvQu4N4ySw ++ cat /tmp/tmp.Cy612SCHxS ++ rm /tmp/tmp.gvQu4N4ySw /tmp/tmp.Cy612SCHxS ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.AqOXzHgdFF ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.5KbEZIGdCR +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.AqOXzHgdFF +++++ cat /tmp/tmp.5KbEZIGdCR +++++ rm /tmp/tmp.AqOXzHgdFF /tmp/tmp.5KbEZIGdCR +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PI8xK9dtJK +++ mktemp ++ local LAST_ERR=/tmp/tmp.CONMnITmYm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PI8xK9dtJK ++ cat /tmp/tmp.CONMnITmYm ++ rm /tmp/tmp.PI8xK9dtJK /tmp/tmp.CONMnITmYm ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yCJJZP9GRz +++ mktemp ++ local LAST_ERR=/tmp/tmp.vmPSyuj4pc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yCJJZP9GRz ++ cat /tmp/tmp.vmPSyuj4pc ++ rm /tmp/tmp.yCJJZP9GRz /tmp/tmp.vmPSyuj4pc ++ return 0 + client_pod=pxc-client-64b479df95-npn8s + wait_pod pxc-client-64b479df95-npn8s + local pod=pxc-client-64b479df95-npn8s + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-npn8s ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-npn8s condition met pxc-client-64b479df95-npn8s.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.TjI7fh8Fme/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1715/e2e-tests/users/compare/select-3.sql /tmp/tmp.TjI7fh8Fme/select-3.sql + destroy users-21638 + local namespace=users-21638 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' ++ get_operator_pod + grep -v 'get backup status: Job.batch' ++ local label_prefix=app.kubernetes.io/ + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + grep -v 'the object has been modified' + grep -v level=info + sort -u + tee /tmp/tmp.TjI7fh8Fme/operator.log +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZIK4FnfHeV +++ mktemp ++ local LAST_ERR=/tmp/tmp.75Ov04Sk5O ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZIK4FnfHeV ++ cat /tmp/tmp.75Ov04Sk5O ++ rm /tmp/tmp.ZIK4FnfHeV /tmp/tmp.75Ov04Sk5O ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-59b7fbbc57-wcwd6 ++ mktemp + local LAST_OUT=/tmp/tmp.j14ixKEZex ++ mktemp + local LAST_ERR=/tmp/tmp.xb6XAR4D6f + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-59b7fbbc57-wcwd6 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.j14ixKEZex + cat /tmp/tmp.xb6XAR4D6f + rm /tmp/tmp.j14ixKEZex /tmp/tmp.xb6XAR4D6f + return 0 2024-05-16T03:48:41.280Z INFO setup Manager starting up {"gitCommit": "225b38be01f1d85ab3ba5404a1190d44219127c8", "gitBranch": "PR-1715-225b38be", "buildTime": "2024-05-16T01:35:00Z", "goVersion": "go1.22.3", "os": "linux", "arch": "amd64"} 2024-05-16T03:48:41.280Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.26.15-gke.1300000"} 2024-05-16T03:48:41.282Z INFO setup Registering Components. 2024-05-16T03:48:42.625Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2024-05-16T03:48:42.627Z INFO setup Starting the Cmd. 2024-05-16T03:48:42.628Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2024-05-16T03:48:42.628Z INFO controller-runtime.metrics Starting metrics server 2024-05-16T03:48:42.628Z INFO controller-runtime.webhook Starting webhook server 2024-05-16T03:48:42.628Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2024-05-16T03:48:42.629Z INFO controller-runtime.certwatcher Starting certificate watcher 2024-05-16T03:48:42.629Z INFO controller-runtime.certwatcher Updated current TLS certificate 2024-05-16T03:48:42.629Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2024-05-16T03:48:42.729Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 
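The trace earlier in this section leans on two helper patterns the suite uses throughout: a kubectl_bin-style wrapper that captures stdout/stderr into mktemp files and retries the command up to three times, and a wait_cluster_consistency-style poll that re-reads .status.state every 20 seconds for at most 36 iterations. Below is a minimal bash sketch of both patterns; the function names and argument handling are simplified assumptions, not the suite's exact helpers.

# Sketch only: simplified rendition of the retry/poll pattern in the trace above.
retry_kubectl() {
    local out err status i
    out=$(mktemp); err=$(mktemp)
    for i in $(seq 0 2); do                          # up to three attempts, as in the trace
        kubectl "$@" >"$out" 2>"$err" && { status=0; break; }
        status=$?
        sleep 1
    done
    cat "$out"; cat "$err" >&2                       # replay captured output, then clean up
    rm -f "$out" "$err"
    return "$status"
}

wait_cluster_ready() {
    local cluster=$1 max=${2:-36} i=0                # 36 polls at 20s is a 12-minute budget
    until [[ $(retry_kubectl get pxc "$cluster" -o 'jsonpath={.status.state}') == ready ]]; do
        echo 'waiting for cluster readiness'
        sleep 20
        (( ++i > max )) && return 1                  # give up after max polls
    done
}

The patched value dGVzdC1wYXNzd29yZDI= is simply base64 for test-password2 (echo -n test-password2 | base64), which is why the compare_mysql_cmd step that follows can log in with -umonitor -p'test-password2' and diff the SHOW DATABASES output against the golden compare/select-3.sql file.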
2024-05-16T03:48:42.752Z DEBUG events percona-xtradb-cluster-operator-59b7fbbc57-wcwd6_d87c43e6-4004-4944-9a04-0bb50547cc15 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"d9a87ecc-0c72-410b-a580-37c0e94acef6","apiVersion":"coordination.k8s.io/v1","resourceVersion":"74746"}, "reason": "LeaderElection"} 2024-05-16T03:48:42.752Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2024-05-16T03:48:42.753Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2024-05-16T03:48:42.753Z INFO Starting Controller {"controller": "pxc-controller"} 2024-05-16T03:48:42.753Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2024-05-16T03:48:42.753Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: unknown type"} 2024-05-16T03:48:42.753Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: unknown type"} 2024-05-16T03:48:42.753Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: unknown type"} 2024-05-16T03:48:42.866Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2024-05-16T03:48:42.866Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2024-05-16T03:48:42.866Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2024-05-16T03:49:10.003Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "8e676459-7383-454e-ba67-5d7f6b81191d", "version": "1.15.0"} 2024-05-16T03:50:23.821Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "172ada3b-57a9-471a-8431-8602f787c68a", "user": "operator"} 2024-05-16T03:50:23.854Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "172ada3b-57a9-471a-8431-8602f787c68a", "user": "monitor"} 2024-05-16T03:50:23.888Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "172ada3b-57a9-471a-8431-8602f787c68a"} 2024-05-16T03:50:23.938Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "172ada3b-57a9-471a-8431-8602f787c68a", "user": "xtrabackup"} 2024-05-16T03:50:23.972Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "172ada3b-57a9-471a-8431-8602f787c68a"} 2024-05-16T03:50:24.083Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "172ada3b-57a9-471a-8431-8602f787c68a", "err": "get primary pxc pod: not found"} 2024-05-16T03:50:29.325Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "062febf4-53db-44d5-981c-a08596b27ddc", "err": "get primary pxc pod: not found"} 2024-05-16T03:50:34.615Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "7dad834c-0098-43b5-98b1-5dfee78d75bb", "err": "get primary pxc pod: not found"} 2024-05-16T03:50:39.777Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", 
"reconcileID": "a6af7f29-3a49-4f6b-a40e-02e4d7c8ff27", "err": "get primary pxc pod: not found"} 2024-05-16T03:52:51.417Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "969b4ebd-ff05-469c-aa23-c7a0e6b2a79e", "user": "root"} 2024-05-16T03:52:51.460Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "969b4ebd-ff05-469c-aa23-c7a0e6b2a79e", "user": "replication"} 2024-05-16T03:52:51.667Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "969b4ebd-ff05-469c-aa23-c7a0e6b2a79e", "new version": "5.7.44-48-57"} 2024-05-16T03:52:55.048Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "969b4ebd-ff05-469c-aa23-c7a0e6b2a79e"} 2024-05-16T03:52:59.777Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "44c37e05-f03e-4cc0-877d-5f9f4b20aa78"} 2024-05-16T03:53:05.165Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "97fe83a4-b686-48ce-91f6-00bbb31deab3"} 2024-05-16T03:53:10.457Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "c142216e-1ace-48a8-a7dd-8dd374288a84"} 2024-05-16T03:53:15.643Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "0b27852c-4d64-490e-aabb-baba7f8cb362"} 2024-05-16T03:53:20.878Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "dc201503-2845-475d-9018-2916c09d9de0"} 2024-05-16T03:53:25.978Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "79dc440f-7fb4-4d41-bc49-13df1e8148ee"} 2024-05-16T03:53:31.829Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "dd077143-745d-492a-8b96-313d68a85fb9"} 2024-05-16T03:53:37.443Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "ec57ef2c-5f2a-45ba-85dc-c5c3fff9ec40"} 2024-05-16T03:53:42.361Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "aa1db141-9b45-4fd2-80a2-5dad2d9f32a4"} 2024-05-16T03:53:47.653Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "5201e47a-ca28-4c28-b044-485fe4215115"} 2024-05-16T03:53:52.926Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "09315d7d-ad1f-4ad6-9ae0-c9d159882e8f"} 2024-05-16T03:53:58.081Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "8a1514ee-1ee7-4426-ac5f-fde3983a7ca9"} 2024-05-16T03:54:00.243Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": 
"38d6407e-5124-4f91-8b0b-374f93b61e78", "user": "root"} 2024-05-16T03:54:00.270Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "38d6407e-5124-4f91-8b0b-374f93b61e78", "user": "root"} 2024-05-16T03:54:00.281Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "38d6407e-5124-4f91-8b0b-374f93b61e78", "secret": "some-name-mysql-init", "user": "root"} 2024-05-16T03:54:05.867Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "38d6407e-5124-4f91-8b0b-374f93b61e78"} 2024-05-16T03:54:05.877Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "38d6407e-5124-4f91-8b0b-374f93b61e78", "user": "root"} 2024-05-16T03:54:09.872Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "38d6407e-5124-4f91-8b0b-374f93b61e78"} 2024-05-16T03:54:14.484Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "1eb97461-ab5c-4f55-a506-6ae45cdddb3f"} 2024-05-16T03:54:19.833Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "2ab6a4eb-0461-4820-b3af-aa94712afd1f"} 2024-05-16T03:54:41.594Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "74e8cef0-aa49-4231-9d25-7b3300b34c60", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-16T03:54:42.498Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "829da518-3679-4f3a-adac-4e56d075ff18", "user": "proxyadmin"} 2024-05-16T03:54:42.498Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "829da518-3679-4f3a-adac-4e56d075ff18", "user": "proxyadmin"} 2024-05-16T03:54:42.556Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "829da518-3679-4f3a-adac-4e56d075ff18", "user": "proxyadmin"} 2024-05-16T03:54:42.571Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "829da518-3679-4f3a-adac-4e56d075ff18", "user": "proxyadmin"} 2024-05-16T03:54:42.571Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", 
"reconcileID": "829da518-3679-4f3a-adac-4e56d075ff18", "last-applied-secret": "1a55867d0f4fa0d0643653b2eb57b2f690315d8d843a10599c9839c2a779a95f"} 2024-05-16T03:54:42.854Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "829da518-3679-4f3a-adac-4e56d075ff18", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-16T03:55:26.728Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "da5f033f-08fd-4468-85d0-3afc4cd4b3b8"} 2024-05-16T03:55:34.578Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "00992a55-9e13-43f5-8cac-c6a6e0cd73cd", "user": "xtrabackup"} 2024-05-16T03:55:34.603Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "00992a55-9e13-43f5-8cac-c6a6e0cd73cd", "user": "xtrabackup"} 2024-05-16T03:55:34.616Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "00992a55-9e13-43f5-8cac-c6a6e0cd73cd", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-05-16T03:55:34.625Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "00992a55-9e13-43f5-8cac-c6a6e0cd73cd", "user": "xtrabackup"} 2024-05-16T03:55:34.625Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "00992a55-9e13-43f5-8cac-c6a6e0cd73cd", "last-applied-secret": "d11bcc70ca53ea410cd455e13d82e7225917b0861d9b677ed34c02c5f6fbaf1b"} 2024-05-16T03:55:44.357Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "e70d514c-a359-4871-b888-9b84f1ab0776"} 2024-05-16T03:57:30.067Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "83a147ce-c09e-4686-8bce-f0ee8b56b98b", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.24.34.77:33062: connect: connection refused"} 2024-05-16T03:57:50.332Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-21638", 
"name": "some-name", "reconcileID": "51e80a65-da5e-4258-8dfd-cfc9dcac4077", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.24.34.77:33062: i/o timeout"} 2024-05-16T03:57:55.541Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "c30f484f-fab9-451a-8ff1-db927a3bca5a", "primary name": "some-name-pxc-0.some-name-pxc.users-21638.svc.cluster.local"} 2024-05-16T03:58:00.743Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "a3dc1291-dc35-49e5-94eb-41d44fdb1743", "primary name": "some-name-pxc-0.some-name-pxc.users-21638.svc.cluster.local"} 2024-05-16T03:58:05.954Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "d65cd3cf-5903-4996-8595-c29a2bbe32ca", "primary name": "some-name-pxc-0.some-name-pxc.users-21638.svc.cluster.local"} 2024-05-16T03:58:11.223Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "e0ff7d0e-4cf6-475c-906a-6780b1668ede", "primary name": "some-name-pxc-0.some-name-pxc.users-21638.svc.cluster.local"} 2024-05-16T03:58:20.168Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "c5b7d8b4-43d2-4582-9f9c-cf0c87d64a51"} 2024-05-16T03:58:25.038Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "5333e5c5-9171-4595-bd75-a5eadfe1c435"} 2024-05-16T03:58:26.844Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "49fb82f7-6bee-46c3-b607-1230143da662", "user": "monitor"} 2024-05-16T03:58:26.866Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "49fb82f7-6bee-46c3-b607-1230143da662", "user": "monitor"} 2024-05-16T03:58:26.878Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "49fb82f7-6bee-46c3-b607-1230143da662", "secret": "some-name-mysql-init", "user": "monitor"} 2024-05-16T03:58:26.910Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "49fb82f7-6bee-46c3-b607-1230143da662", "user": "monitor"} 2024-05-16T03:58:26.926Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "49fb82f7-6bee-46c3-b607-1230143da662", "user": "monitor"} 2024-05-16T03:58:26.926Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "49fb82f7-6bee-46c3-b607-1230143da662", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"} 2024-05-16T03:58:29.862Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "49fb82f7-6bee-46c3-b607-1230143da662", "error": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: unable to 
upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-16T03:59:04.747Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "4ed50a94-7609-4b09-b3e3-1f429b605de1"} 2024-05-16T03:59:09.237Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "86cf81c6-c230-4139-84da-ffc0b815b17b"} 2024-05-16T03:59:14.449Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "31d296c3-9bf3-4f7f-9344-6e2440a9eb5c"} 2024-05-16T03:59:19.749Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "8fb2be2b-9758-4f31-a1e7-bc553a24599c"} 2024-05-16T03:59:25.128Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "08e50689-a076-4e0b-912a-332f09312e72"} 2024-05-16T03:59:30.279Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "b32f334b-2da3-4d1d-bd6d-20cb2dfe5c6b"} 2024-05-16T03:59:35.581Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "4824779a-7f27-43c6-a872-500f5d288e77"} 2024-05-16T03:59:37.344Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "7a7aa860-5565-41df-bc07-4cba1959c818", "user": "operator"} 2024-05-16T03:59:37.361Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "7a7aa860-5565-41df-bc07-4cba1959c818", "user": "operator"} 2024-05-16T03:59:37.371Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "7a7aa860-5565-41df-bc07-4cba1959c818", "secret": "some-name-mysql-init", "user": "operator"} 2024-05-16T03:59:37.380Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "7a7aa860-5565-41df-bc07-4cba1959c818", "user": "operator"} 2024-05-16T03:59:37.421Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "7a7aa860-5565-41df-bc07-4cba1959c818", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-05-16T03:59:38.759Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "7a7aa860-5565-41df-bc07-4cba1959c818", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-21638.svc.' 
(using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-21638.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-21638.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-21638.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-21638.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-21638.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-16T04:00:08.099Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "781012d1-a5b4-40ed-831f-e6e6555e64cb"} 2024-05-16T04:00:16.265Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "6bf04cb8-0a0f-4fff-9980-2b614452c2dd"} 2024-05-16T04:00:21.534Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "05e38aa8-f4b6-4274-b805-e6748c080766"} 2024-05-16T04:00:26.832Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "af49d576-c821-4f89-a0cc-df2e24e26b04"} 2024-05-16T04:00:32.249Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "7d2d2814-8cdf-4c7c-b822-22c64be2678b"} 2024-05-16T04:00:36.554Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "471aea51-55f3-415d-8fa4-0d170d82395c", "secrets": "my-cluster-secrets-2"} 2024-05-16T04:00:36.554Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "471aea51-55f3-415d-8fa4-0d170d82395c", "user": "root"} 2024-05-16T04:00:36.581Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "471aea51-55f3-415d-8fa4-0d170d82395c", "user": "root"} 2024-05-16T04:00:36.593Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "471aea51-55f3-415d-8fa4-0d170d82395c", "secret": "some-name-mysql-init", "user": "root"} 2024-05-16T04:00:38.964Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "7d5c3631-3406-425a-9b7a-a4f702329f54"} 
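The burst at 04:00:36 ("Created user secrets ... my-cluster-secrets-2", then password updates for root, operator, monitor, xtrabackup, replication, and proxyadmin) is what a secrets switch looks like from the operator's side: every system user is rotated, the mysql-init secret is rewritten, and both ProxySQL and PXC pods are queued for restart under a new last-applied-secret hash; the intermittent "exec syncusers" errors around each rotation are transient, occurring while the proxy pods restart. A hedged sketch of triggering and observing such a rotation (resource names are taken from this log; spec.secretsName is the operator's standard knob for this, though whether the test drives it exactly this way is an assumption):

# Point the CR at a fresh secret object; the operator then rotates all system users.
kubectl -n users-21638 patch pxc some-name --type=merge \
  -p '{"spec":{"secretsName":"my-cluster-secrets-2"}}'

# Watch the rotation sequence roll through the operator log.
kubectl -n pxc-operator logs deploy/percona-xtradb-cluster-operator -f \
  | grep -E 'Password changed|User password updated|pods will be restarted'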
2024-05-16T04:00:41.377Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "471aea51-55f3-415d-8fa4-0d170d82395c"} 2024-05-16T04:00:41.388Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "471aea51-55f3-415d-8fa4-0d170d82395c", "user": "root"} 2024-05-16T04:00:41.388Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "471aea51-55f3-415d-8fa4-0d170d82395c", "user": "operator"} 2024-05-16T04:00:41.408Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "471aea51-55f3-415d-8fa4-0d170d82395c", "user": "operator"} 2024-05-16T04:00:41.420Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "471aea51-55f3-415d-8fa4-0d170d82395c", "secret": "some-name-mysql-init", "user": "operator"} 2024-05-16T04:00:41.428Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "471aea51-55f3-415d-8fa4-0d170d82395c", "user": "operator"} 2024-05-16T04:00:41.428Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "471aea51-55f3-415d-8fa4-0d170d82395c", "user": "monitor"} 2024-05-16T04:00:41.447Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "471aea51-55f3-415d-8fa4-0d170d82395c", "user": "monitor"} 2024-05-16T04:00:41.459Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "471aea51-55f3-415d-8fa4-0d170d82395c", "secret": "some-name-mysql-init", "user": "monitor"} 2024-05-16T04:00:41.494Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "471aea51-55f3-415d-8fa4-0d170d82395c", "user": "monitor"} 2024-05-16T04:00:41.505Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "471aea51-55f3-415d-8fa4-0d170d82395c", "user": "monitor"} 2024-05-16T04:00:41.505Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "471aea51-55f3-415d-8fa4-0d170d82395c", "user": "xtrabackup"} 2024-05-16T04:00:41.523Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "471aea51-55f3-415d-8fa4-0d170d82395c", "user": "xtrabackup"} 2024-05-16T04:00:41.532Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "471aea51-55f3-415d-8fa4-0d170d82395c", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-05-16T04:00:41.545Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "471aea51-55f3-415d-8fa4-0d170d82395c", "user": "xtrabackup"} 2024-05-16T04:00:41.545Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "471aea51-55f3-415d-8fa4-0d170d82395c", "user": "replication"} 2024-05-16T04:00:41.564Z INFO User password updated 
{"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "471aea51-55f3-415d-8fa4-0d170d82395c", "user": "replication"} 2024-05-16T04:00:41.572Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "471aea51-55f3-415d-8fa4-0d170d82395c", "secret": "some-name-mysql-init", "user": "replication"} 2024-05-16T04:00:41.587Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "471aea51-55f3-415d-8fa4-0d170d82395c", "user": "replication"} 2024-05-16T04:00:41.587Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "471aea51-55f3-415d-8fa4-0d170d82395c", "user": "proxyadmin"} 2024-05-16T04:00:41.618Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "471aea51-55f3-415d-8fa4-0d170d82395c", "user": "proxyadmin"} 2024-05-16T04:00:41.627Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "471aea51-55f3-415d-8fa4-0d170d82395c", "user": "proxyadmin"} 2024-05-16T04:00:41.628Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "471aea51-55f3-415d-8fa4-0d170d82395c", "last-applied-secret": "3eb282217b67ad5bf946dd8241e94b3df4020e03c9e902ed7479d9feeadd2823"} 2024-05-16T04:00:41.628Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "471aea51-55f3-415d-8fa4-0d170d82395c", "last-applied-secret": "3eb282217b67ad5bf946dd8241e94b3df4020e03c9e902ed7479d9feeadd2823"} 2024-05-16T04:00:41.952Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "471aea51-55f3-415d-8fa4-0d170d82395c", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-16T04:00:42.516Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "115fa49b-d9f8-4cfc-adca-f66bb1622937", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-16T04:02:23.695Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "3b975dc5-5e7f-4e47-9899-9e41346e0d73", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-21638 on 10.67.32.10:53: no such host"} 2024-05-16T04:02:33.984Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "fa00e8f4-d8b5-43d1-94e0-a72257a94139", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.24.34.80:33062: connect: connection refused"} 2024-05-16T04:02:39.239Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "0761cb11-3ccd-42bd-af75-3269684fbd9c", "primary name": "some-name-pxc-0.some-name-pxc.users-21638.svc.cluster.local"} 2024-05-16T04:02:44.417Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "dd68c5e2-0797-460f-8c3d-bb013c93ba26", "primary name": "some-name-pxc-0.some-name-pxc.users-21638.svc.cluster.local"} 2024-05-16T04:02:49.577Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "5f147a79-08d7-4e1c-ac74-973ab2c3a613", "primary name": "some-name-pxc-0.some-name-pxc.users-21638.svc.cluster.local"} 2024-05-16T04:02:54.887Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "9c9f808d-35d7-4c6e-acfe-575aebfe3c76", "primary name": "some-name-pxc-0.some-name-pxc.users-21638.svc.cluster.local"} 2024-05-16T04:03:00.111Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "e4c67aba-53c3-4b09-ba8b-68d3d3b0d222", "primary name": "some-name-pxc-0.some-name-pxc.users-21638.svc.cluster.local"} 2024-05-16T04:03:05.297Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "fba44100-f193-458e-983e-1e67fc8cec81", "primary name": "some-name-pxc-0.some-name-pxc.users-21638.svc.cluster.local"} 2024-05-16T04:03:10.503Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "bd00654e-e32d-4f8d-a292-e99014986987", "primary name": "some-name-pxc-0.some-name-pxc.users-21638.svc.cluster.local"} 2024-05-16T04:03:24.766Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "57e236fb-5ed9-4d29-85a8-391f818cab42"} 2024-05-16T04:03:29.829Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "61bf52c1-db57-4f0b-babd-5cf530bcfac7"} 2024-05-16T04:03:35.232Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "d1900627-ecbf-4ae9-8b1a-a71b2bd10702"} 2024-05-16T04:03:40.165Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "9a73341e-01ce-4511-85b3-c95fbe74ccf9"} 2024-05-16T04:03:46.027Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "191ad7a0-8f0a-47ee-a1f3-f16bf36ea1da"} 2024-05-16T04:03:47.321Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "0a7455c1-3df2-4f33-889d-df35fc66aba0", "user": "operator"} 2024-05-16T04:03:47.341Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "0a7455c1-3df2-4f33-889d-df35fc66aba0", "user": "operator"} 2024-05-16T04:03:47.350Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "0a7455c1-3df2-4f33-889d-df35fc66aba0", "secret": "some-name-mysql-init", "user": "operator"} 2024-05-16T04:03:47.360Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "0a7455c1-3df2-4f33-889d-df35fc66aba0", "user": "operator"} 2024-05-16T04:03:47.360Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-21638", "name": 
"some-name", "reconcileID": "0a7455c1-3df2-4f33-889d-df35fc66aba0", "last-applied-secret": "f697121215537c283d66504ce4c55d8c3be94ccf6a3c05ba6b418c1e0100e04b"} 2024-05-16T04:03:48.661Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "0a7455c1-3df2-4f33-889d-df35fc66aba0", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-21638.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-21638.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-21638.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-21638.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-21638.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-21638.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-16T04:04:32.897Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "2eae5e9b-723b-465f-b71b-d880acd35322"} 2024-05-16T04:04:40.750Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "48aaa0ed-792f-474f-9929-d32d7473b4c6"} 2024-05-16T04:04:46.062Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "f3135815-8840-4fd8-ad90-0a55f90a1ead"} 2024-05-16T04:04:51.560Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "b214e827-0299-4441-b1cd-a0f0f06ec3b8"} 2024-05-16T04:04:56.946Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "f751a62b-bf7a-4af6-b0c5-27cb3f9d7e47"} 2024-05-16T04:05:02.442Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "7cd1f42a-91df-4813-a377-8e19861650fa"} 2024-05-16T04:05:07.241Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "aecdf00c-5b8a-4582-92a3-749273a21ef9"} 2024-05-16T04:05:12.562Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": 
"42496fda-b5cd-41fb-92c8-319a088ceb8c"} 2024-05-16T04:05:17.863Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "e52ceba1-21e9-4a49-8b3b-775c92b658f8"} 2024-05-16T04:05:24.906Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "f9d43056-83d4-4cfd-b290-a51790050da1"} 2024-05-16T04:05:28.559Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "a73b1106-aff6-42e6-97cf-14b74109c8a2"} 2024-05-16T04:05:33.856Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "a1bb83c3-da79-409f-9311-743b5ed566b0"} 2024-05-16T04:05:39.148Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "762b1ba1-a39f-47f1-8651-e17152cbfdc4"} 2024-05-16T04:05:44.467Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "05bcdf1b-c4d9-464a-8480-dd4cf1c8ddd6"} 2024-05-16T04:05:49.871Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "3020e2ee-e6b8-4313-ae59-d02efdb342dd"} 2024-05-16T04:05:55.029Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "e09c4b9d-ff12-4408-9f5d-7015e1394da9"} 2024-05-16T04:06:00.345Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "24f6ea38-cd65-4339-bf86-7fa7a6691d8d"} 2024-05-16T04:06:05.574Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "2086f7e2-927f-43a2-916b-40843b5b0b84"} 2024-05-16T04:06:07.329Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "e0a30909-8c91-4f82-9619-9a227dadf43c", "user": "root"} 2024-05-16T04:06:07.358Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "e0a30909-8c91-4f82-9619-9a227dadf43c", "user": "root"} 2024-05-16T04:06:07.370Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "e0a30909-8c91-4f82-9619-9a227dadf43c", "secret": "some-name-mysql-init", "user": "root"} 2024-05-16T04:06:13.150Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "e0a30909-8c91-4f82-9619-9a227dadf43c"} 2024-05-16T04:06:13.167Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "e0a30909-8c91-4f82-9619-9a227dadf43c", "user": "root"} 2024-05-16T04:06:13.168Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "e0a30909-8c91-4f82-9619-9a227dadf43c", "user": "monitor"} 2024-05-16T04:06:13.187Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "e0a30909-8c91-4f82-9619-9a227dadf43c", "user": "monitor"} 2024-05-16T04:06:13.198Z 
INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "e0a30909-8c91-4f82-9619-9a227dadf43c", "secret": "some-name-mysql-init", "user": "monitor"} 2024-05-16T04:06:13.232Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "e0a30909-8c91-4f82-9619-9a227dadf43c", "user": "monitor"} 2024-05-16T04:06:13.246Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "e0a30909-8c91-4f82-9619-9a227dadf43c", "user": "monitor"} 2024-05-16T04:06:13.246Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "e0a30909-8c91-4f82-9619-9a227dadf43c", "user": "xtrabackup"} 2024-05-16T04:06:13.264Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "e0a30909-8c91-4f82-9619-9a227dadf43c", "user": "xtrabackup"} 2024-05-16T04:06:13.275Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "e0a30909-8c91-4f82-9619-9a227dadf43c", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-05-16T04:06:13.289Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "e0a30909-8c91-4f82-9619-9a227dadf43c", "user": "xtrabackup"} 2024-05-16T04:06:13.289Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "e0a30909-8c91-4f82-9619-9a227dadf43c", "user": "proxyadmin"} 2024-05-16T04:06:13.330Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "e0a30909-8c91-4f82-9619-9a227dadf43c", "user": "proxyadmin"} 2024-05-16T04:06:13.341Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "e0a30909-8c91-4f82-9619-9a227dadf43c", "user": "proxyadmin"} 2024-05-16T04:06:13.341Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "e0a30909-8c91-4f82-9619-9a227dadf43c", "last-applied-secret": "a77c93ba046e28518524b135e829ff39eb047e4974111f8d57953e7babe71c70"} 2024-05-16T04:06:13.342Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "e0a30909-8c91-4f82-9619-9a227dadf43c", "last-applied-secret": "a77c93ba046e28518524b135e829ff39eb047e4974111f8d57953e7babe71c70"} 2024-05-16T04:06:13.619Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "e0a30909-8c91-4f82-9619-9a227dadf43c", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-05-16T04:06:27.267Z INFO KubeAPIWarningLogger .metadata.ownerReferences contains duplicate entries; API server dedups owner references in 1.20+, and may reject such requests as early as 1.24; please fix your requests; duplicate UID(s) observed: c55df239-8504-43c1-b81d-2e0c9543a9b5 2024-05-16T04:07:51.911Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "98961736-b914-43de-a8a6-15f6952706ee", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-21638 on 10.67.32.10:53: no such host"} 2024-05-16T04:08:38.771Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "6df1a87e-1e11-4ade-b530-c377bfc8a235", "user": "root"} 2024-05-16T04:08:38.794Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "6df1a87e-1e11-4ade-b530-c377bfc8a235", "user": "root"} 2024-05-16T04:08:38.801Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "6df1a87e-1e11-4ade-b530-c377bfc8a235", "secret": "some-name-mysql-init", "user": "root"} 2024-05-16T04:08:38.815Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "6df1a87e-1e11-4ade-b530-c377bfc8a235", "user": "root"} 2024-05-16T04:08:38.815Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "6df1a87e-1e11-4ade-b530-c377bfc8a235", "user": "operator"} 2024-05-16T04:08:38.829Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "6df1a87e-1e11-4ade-b530-c377bfc8a235", "user": "operator"} 2024-05-16T04:08:38.839Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "6df1a87e-1e11-4ade-b530-c377bfc8a235", "secret": "some-name-mysql-init", "user": "operator"} 2024-05-16T04:08:38.850Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "6df1a87e-1e11-4ade-b530-c377bfc8a235", "user": "operator"} 2024-05-16T04:08:38.851Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "6df1a87e-1e11-4ade-b530-c377bfc8a235", "user": "monitor"} 2024-05-16T04:08:38.867Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "6df1a87e-1e11-4ade-b530-c377bfc8a235", "user": "monitor"} 2024-05-16T04:08:38.876Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": 
"users-21638", "name": "some-name", "reconcileID": "6df1a87e-1e11-4ade-b530-c377bfc8a235", "secret": "some-name-mysql-init", "user": "monitor"} 2024-05-16T04:08:38.887Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "6df1a87e-1e11-4ade-b530-c377bfc8a235", "user": "monitor"} 2024-05-16T04:08:38.887Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "6df1a87e-1e11-4ade-b530-c377bfc8a235", "user": "xtrabackup"} 2024-05-16T04:08:38.904Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "6df1a87e-1e11-4ade-b530-c377bfc8a235", "user": "xtrabackup"} 2024-05-16T04:08:38.914Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "6df1a87e-1e11-4ade-b530-c377bfc8a235", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-05-16T04:08:38.926Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "6df1a87e-1e11-4ade-b530-c377bfc8a235", "user": "xtrabackup"} 2024-05-16T04:08:38.926Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "6df1a87e-1e11-4ade-b530-c377bfc8a235", "user": "replication"} 2024-05-16T04:08:38.949Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "6df1a87e-1e11-4ade-b530-c377bfc8a235", "user": "replication"} 2024-05-16T04:08:38.962Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "6df1a87e-1e11-4ade-b530-c377bfc8a235", "secret": "some-name-mysql-init", "user": "replication"} 2024-05-16T04:08:38.972Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "6df1a87e-1e11-4ade-b530-c377bfc8a235", "user": "replication"} 2024-05-16T04:08:38.972Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "6df1a87e-1e11-4ade-b530-c377bfc8a235", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-05-16T04:08:38.972Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "6df1a87e-1e11-4ade-b530-c377bfc8a235", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-05-16T04:08:40.225Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "39a779ab-c5f3-49a1-8cd7-95195eefe0c6", "err": "failed to connect to pod some-name-pxc-2: dial tcp: lookup some-name-pxc-2.some-name-pxc.users-21638 on 10.67.32.10:53: no such host"} 2024-05-16T04:11:20.582Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "840e1dbe-06f5-42bd-85e4-6c89c0005e10", "user": "monitor"} 2024-05-16T04:11:20.598Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "840e1dbe-06f5-42bd-85e4-6c89c0005e10", "user": "monitor"} 2024-05-16T04:11:20.608Z INFO MySQL init secret updated 
{"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "840e1dbe-06f5-42bd-85e4-6c89c0005e10", "secret": "some-name-mysql-init", "user": "monitor"} 2024-05-16T04:11:20.617Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "840e1dbe-06f5-42bd-85e4-6c89c0005e10", "user": "monitor"} 2024-05-16T04:11:20.617Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-21638", "name": "some-name", "reconcileID": "840e1dbe-06f5-42bd-85e4-6c89c0005e10", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"} github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:222 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:261 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:324 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1248 [mysql] 2024/05/16 04:08:12 packets.go:37: read tcp 10.24.34.75:36490->10.67.47.149:3306: i/o timeout [mysql] 2024/05/16 04:10:44 packets.go:37: unexpected EOF [mysql] 2024/05/16 04:11:26 packets.go:37: unexpected EOF [mysql] 2024/05/16 04:11:32 packets.go:37: unexpected EOF sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).processNextWorkItem sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).reconcileHandler sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Start.func2.2 + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-21638 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.ttK6PXM3R4 ++ mktemp + local LAST_ERR=/tmp/tmp.nX72aktsZX + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ttK6PXM3R4 perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.nX72aktsZX + rm /tmp/tmp.ttK6PXM3R4 /tmp/tmp.nX72aktsZX + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.nlJl6UiMvL ++ mktemp + local LAST_ERR=/tmp/tmp.gFCd0PUlZt + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.nlJl6UiMvL No resources found + cat /tmp/tmp.gFCd0PUlZt + rm /tmp/tmp.nlJl6UiMvL /tmp/tmp.gFCd0PUlZt + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.EeIbqRUtdh ++ mktemp + local LAST_ERR=/tmp/tmp.IjOYjcszTv + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.EeIbqRUtdh No resources found + cat /tmp/tmp.IjOYjcszTv + rm /tmp/tmp.EeIbqRUtdh /tmp/tmp.IjOYjcszTv + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.qTjfkbkBeR ++ mktemp + 
+ kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
++ mktemp
+ local LAST_OUT=/tmp/tmp.qTjfkbkBeR
++ mktemp
+ local LAST_ERR=/tmp/tmp.cTjdyfM6nM
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.qTjfkbkBeR
validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted
+ cat /tmp/tmp.cTjdyfM6nM
+ rm /tmp/tmp.qTjfkbkBeR /tmp/tmp.cTjdyfM6nM
+ return 0
+ kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml
+ :
+ '[' '!' -z '' ']'
+ '[' -n pxc-operator ']'
+ kubectl_bin delete --grace-period=0 --force=true namespace users-21638
+ rm -rf /tmp/tmp.TjI7fh8Fme
+ kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator
++ mktemp
++ mktemp
+ desc 'test passed'
+ set +o xtrace
-----------------------------------------------------------------------------------
test passed
+ local LAST_OUT=/tmp/tmp.K15jLIR9Oy
-----------------------------------------------------------------------------------
+ local LAST_OUT=/tmp/tmp.Imii2Ou5u2
++ mktemp
++ mktemp
+ local LAST_ERR=/tmp/tmp.DgDQtQMxoO
+ local exit_status=0
+ local LAST_ERR=/tmp/tmp.IHdNTfu0ty
+ local exit_status=0
++ seq 0 2
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace pxc-operator
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace users-21638
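Editor's note: the mixed "test passed" banner and doubled mktemp/seq lines at the end indicate the two namespace deletions run concurrently, so their xtrace output interleaves. A minimal sketch of that teardown pattern, under the assumption that the harness backgrounds both calls:

    # hypothetical parallel teardown matching the interleaved trace;
    # backgrounding with & is an assumption about how the harness runs these
    kubectl delete --grace-period=0 --force=true namespace users-21638 &
    kubectl delete --grace-period=0 --force=true namespace pxc-operator &
    wait    # block until both background deletions have finished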