Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/logs/users-5-7.log WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1 + create_infra users-9787 + local ns=users-9787 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-19694 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.K5VjflFHED ++ mktemp + local LAST_ERR=/tmp/tmp.fh0m1RYQM5 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.K5VjflFHED perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.fh0m1RYQM5 + rm /tmp/tmp.K5VjflFHED /tmp/tmp.fh0m1RYQM5 + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.1fcwbt6G6C ++ mktemp + local LAST_ERR=/tmp/tmp.XDKrCE7tiN + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.1fcwbt6G6C No resources found + cat /tmp/tmp.XDKrCE7tiN + rm /tmp/tmp.1fcwbt6G6C /tmp/tmp.XDKrCE7tiN + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.CZsSdO201e ++ mktemp + local LAST_ERR=/tmp/tmp.YEkNCtYvBC + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.CZsSdO201e No resources found + cat /tmp/tmp.YEkNCtYvBC + rm /tmp/tmp.CZsSdO201e /tmp/tmp.YEkNCtYvBC + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ sed s/NAMESPACE// ++ awk '-F ' '{print $2}' + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + 
: + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + awk '{print$1}' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin delete namespace pxc-operator + xargs kubectl delete ns ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.n8sB2kTima + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' ++ mktemp + local LAST_OUT=/tmp/tmp.SrcQHW4A4D ++ mktemp + local LAST_ERR=/tmp/tmp.3thPkj9790 + local exit_status=0 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.ySoWFXNPZ9 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.SrcQHW4A4D + cat /tmp/tmp.ySoWFXNPZ9 + rm /tmp/tmp.SrcQHW4A4D /tmp/tmp.ySoWFXNPZ9 + return 0 namespace "users-19694" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.n8sB2kTima namespace "pxc-operator" deleted + cat /tmp/tmp.3thPkj9790 + rm /tmp/tmp.n8sB2kTima /tmp/tmp.3thPkj9790 + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'namespace/pxc-operator - ' namespace/pxc-operator - + set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.N1CRUktE77 ++ mktemp + local LAST_ERR=/tmp/tmp.qNSDgtupqF + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.N1CRUktE77 namespace/pxc-operator created + cat /tmp/tmp.qNSDgtupqF + rm /tmp/tmp.N1CRUktE77 /tmp/tmp.qNSDgtupqF + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.jQ2bAmhaUx +++ mktemp ++ local LAST_ERR=/tmp/tmp.7mIMOmBJAJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jQ2bAmhaUx ++ cat /tmp/tmp.7mIMOmBJAJ ++ rm /tmp/tmp.jQ2bAmhaUx /tmp/tmp.7mIMOmBJAJ ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1750-de45ff75-2-cluster8 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.x4NKRi0Eya ++ mktemp + local LAST_ERR=/tmp/tmp.cZ6HKLJvZa + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1750-de45ff75-2-cluster8 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.x4NKRi0Eya Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1750-de45ff75-2-cluster8" modified. 
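Every kubectl invocation in this log runs through the suite's kubectl_bin wrapper, whose expansion produces the recurring mktemp / LAST_OUT / LAST_ERR / 'seq 0 2' pattern above. A minimal sketch reconstructed from that trace follows; only the retry loop, the temp-file capture, and the final cat/rm/return are taken from the log, while the exact redirection and back-off policy are assumptions:

    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0
        LAST_OUT=$(mktemp)    # captures stdout of each attempt
        LAST_ERR=$(mktemp)    # captures stderr of each attempt
        for i in $(seq 0 2); do                      # up to three attempts, as in the trace
            set +e                                   # tolerate a failing attempt under 'set -e'
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            if [ "$exit_status" != 0 ]; then
                sleep 0                              # the trace records 'sleep 0' between attempts
            else
                break                                # success: stop retrying
            fi
        done
        cat "$LAST_OUT"                              # replay captured output into the log
        cat "$LAST_ERR" >&2
        rm "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }
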
+ cat /tmp/tmp.cZ6HKLJvZa + rm /tmp/tmp.x4NKRi0Eya /tmp/tmp.cZ6HKLJvZa + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.ISBRN4Hf1f ++ mktemp + local LAST_ERR=/tmp/tmp.aJxCQ3cOjB + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ISBRN4Hf1f customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.aJxCQ3cOjB + rm /tmp/tmp.ISBRN4Hf1f /tmp/tmp.aJxCQ3cOjB + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/deploy/cw-rbac.yaml + kubectl_bin apply -f - + sed -e 's^namespace: .*^namespace: pxc-operator^' ++ mktemp + local LAST_OUT=/tmp/tmp.3kGvULEw1s ++ mktemp + local LAST_ERR=/tmp/tmp.hNe9DaNasM + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.3kGvULEw1s clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.hNe9DaNasM + rm /tmp/tmp.3kGvULEw1s /tmp/tmp.hNe9DaNasM + return 0 + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/deploy/cw-operator.yaml + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-1750-de45ff75^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.NGsSJ9qUXk ++ mktemp + local LAST_ERR=/tmp/tmp.j5vTLPn9tb + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.NGsSJ9qUXk deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.j5vTLPn9tb + rm /tmp/tmp.NGsSJ9qUXk /tmp/tmp.j5vTLPn9tb + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.gMyh7TyZTy ++ mktemp + local LAST_ERR=/tmp/tmp.5dYVnuMhkk + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.gMyh7TyZTy pod/percona-xtradb-cluster-operator-7c5cdd5dd9-mtsvq condition met + cat /tmp/tmp.5dYVnuMhkk + rm /tmp/tmp.gMyh7TyZTy /tmp/tmp.5dYVnuMhkk + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.32VtfBRe6M +++ mktemp ++ local LAST_ERR=/tmp/tmp.NXjy1rZoA5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.32VtfBRe6M ++ cat /tmp/tmp.NXjy1rZoA5 ++ rm /tmp/tmp.32VtfBRe6M /tmp/tmp.NXjy1rZoA5 ++ return 0 + wait_pod percona-xtradb-cluster-operator-7c5cdd5dd9-mtsvq 480 pxc-operator + local pod=percona-xtradb-cluster-operator-7c5cdd5dd9-mtsvq + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-7c5cdd5dd9-mtsvq ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-7c5cdd5dd9-mtsvq condition met percona-xtradb-cluster-operator-7c5cdd5dd9-mtsvq.Ok + sleep 3 + create_namespace users-9787 + local namespace=users-9787 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ sed s/NAMESPACE// ++ awk '-F ' '{print $2}' + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + egrep -v 
'^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + '[' -n '' ']' + awk '{print$1}' + desc 'cleaned up old namespaces users-9787' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-9787 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-9787 + kubectl_bin get ns ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.q8d5MHIjqL + local LAST_OUT=/tmp/tmp.7QHzhtX3jQ ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.1Nt67WTqEQ + local exit_status=0 + local LAST_ERR=/tmp/tmp.1GzLgUKZ9c + local exit_status=0 ++ seq 0 2 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-9787 + exit_status=1 + exit_status=0 + set -e + set -e + '[' 1 '!=' 0 ']' + '[' 0 '!=' 0 ']' + break + '[' 1 == 1 ']' + cat /tmp/tmp.q8d5MHIjqL + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-9787 + cat /tmp/tmp.1Nt67WTqEQ + rm /tmp/tmp.q8d5MHIjqL /tmp/tmp.1Nt67WTqEQ + return 0 Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-9787 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.7QHzhtX3jQ + cat /tmp/tmp.1GzLgUKZ9c Error from server (NotFound): namespaces "users-9787" not found + rm /tmp/tmp.7QHzhtX3jQ /tmp/tmp.1GzLgUKZ9c + return 1 + : + wait_for_delete namespace/users-9787 + local res=namespace/users-9787 + echo -n 'namespace/users-9787 - ' namespace/users-9787 - + set +o xtrace Error from server (NotFound): namespaces "users-9787" not found + desc 'create namespace users-9787' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-9787 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-9787 ++ mktemp + local LAST_OUT=/tmp/tmp.ug0ERY5In2 ++ mktemp + local LAST_ERR=/tmp/tmp.PyTEh74QlD + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-9787 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ug0ERY5In2 namespace/users-9787 created + cat /tmp/tmp.PyTEh74QlD + rm /tmp/tmp.ug0ERY5In2 /tmp/tmp.PyTEh74QlD + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.WLID0w50X9 +++ mktemp ++ local LAST_ERR=/tmp/tmp.x30lpZ7my7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WLID0w50X9 ++ cat /tmp/tmp.x30lpZ7my7 ++ rm /tmp/tmp.WLID0w50X9 /tmp/tmp.x30lpZ7my7 ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1750-de45ff75-2-cluster8 --namespace=users-9787 ++ mktemp + local LAST_OUT=/tmp/tmp.dmtL7Ib4K2 ++ mktemp + local LAST_ERR=/tmp/tmp.Hl6u6eqwYh + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1750-de45ff75-2-cluster8 --namespace=users-9787 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dmtL7Ib4K2 Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1750-de45ff75-2-cluster8" modified. 
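The create_namespace helper above first sweeps stale namespaces and only then recreates the target one, tolerating expected failures (the '+ :' no-ops after 'return 1', e.g. the undeletable "default" namespace). Condensed from the trace, with '|| :' mirroring those no-ops:

    # delete every namespace except system/protected ones (filter copied from the trace)
    kubectl get ns \
        | egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' \
        | awk '{print $1}' \
        | xargs kubectl delete ns || :
    kubectl delete namespace users-9787 || :    # may not exist yet: tolerated
    kubectl create namespace users-9787
    kubectl config set-context "$(kubectl config current-context)" --namespace=users-9787
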
+ cat /tmp/tmp.Hl6u6eqwYh + rm /tmp/tmp.dmtL7Ib4K2 /tmp/tmp.Hl6u6eqwYh + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.n2IauFIukN ++ mktemp + local LAST_ERR=/tmp/tmp.sXORCKsmGg + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.n2IauFIukN secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.sXORCKsmGg + rm /tmp/tmp.n2IauFIukN /tmp/tmp.sXORCKsmGg + return 0 + desc 'create PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.xJHd7w8drX ++ mktemp + local LAST_ERR=/tmp/tmp.iPjU88Wntb + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.xJHd7w8drX secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.iPjU88Wntb + rm /tmp/tmp.xJHd7w8drX /tmp/tmp.iPjU88Wntb + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/conf/client.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/conf/client.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/conf/client.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-pmm$#image: 
perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + local LAST_OUT=/tmp/tmp.5RFCMafpAW + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1750-de45ff75#' ++ mktemp + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-9787~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + local LAST_ERR=/tmp/tmp.l8hpVRIqh5 + local exit_status=0 ++ seq 0 2 + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.5RFCMafpAW deployment.apps/pxc-client created + cat /tmp/tmp.l8hpVRIqh5 + rm /tmp/tmp.5RFCMafpAW /tmp/tmp.l8hpVRIqh5 + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p\-\c\l\o\u\d ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/conf/some-name.yml + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' ++ mktemp + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + local LAST_OUT=/tmp/tmp.R0gsj6pzTT + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1750-de45ff75#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' ++ mktemp + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + local LAST_ERR=/tmp/tmp.gHjQ45ykxV + local exit_status=0 + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-9787~ + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.R0gsj6pzTT perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.gHjQ45ykxV + rm /tmp/tmp.R0gsj6pzTT /tmp/tmp.gHjQ45ykxV + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.vHrtBXrXO4 ++++ mktemp +++ local LAST_ERR=/tmp/tmp.0fdQSxXll5 +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 
0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.vHrtBXrXO4 +++ cat /tmp/tmp.0fdQSxXll5 +++ rm /tmp/tmp.vHrtBXrXO4 /tmp/tmp.0fdQSxXll5 +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.cGJt7XPfvk ++++ mktemp +++ local LAST_ERR=/tmp/tmp.4dQtmfvG8M +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.cGJt7XPfvk +++ cat /tmp/tmp.4dQtmfvG8M +++ rm /tmp/tmp.cGJt7XPfvk /tmp/tmp.4dQtmfvG8M +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-9787 ++ mktemp + local LAST_OUT=/tmp/tmp.HaTY11umIl ++ mktemp + local LAST_ERR=/tmp/tmp.zbgrSz4i8L + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-9787 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-9787 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-9787 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.HaTY11umIl + cat /tmp/tmp.zbgrSz4i8L error: no matching resources found + rm /tmp/tmp.HaTY11umIl /tmp/tmp.zbgrSz4i8L + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met some-name-proxysql-0.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met some-name-pxc-0.Ok + for 
i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met some-name-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met some-name-pxc-2.Ok + sleep 10 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.anUCejkyGW +++ mktemp ++ local LAST_ERR=/tmp/tmp.TjRokDlWqC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.anUCejkyGW ++ cat /tmp/tmp.TjRokDlWqC Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.anUCejkyGW /tmp/tmp.TjRokDlWqC ++ return 0 + [[ -n '' ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WIMNz1HM8Z +++ mktemp ++ local LAST_ERR=/tmp/tmp.Y4JoWOaLDY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WIMNz1HM8Z ++ cat /tmp/tmp.Y4JoWOaLDY ++ rm /tmp/tmp.WIMNz1HM8Z /tmp/tmp.Y4JoWOaLDY ++ return 0 + client_pod=pxc-client-64b479df95-6298g + wait_pod pxc-client-64b479df95-6298g + local pod=pxc-client-64b479df95-6298g + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-6298g ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-6298g condition met pxc-client-64b479df95-6298g.Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7fuf7JVdUX +++ mktemp ++ local LAST_ERR=/tmp/tmp.XpI2osIQGp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' 
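run_mysql, used above for the CREATE and INSERT statements, resolves the pxc-client pod and executes the query inside it. The pod lookup is verbatim from the trace, but the mysql command line itself is hidden by 'set +o xtrace', so the exec line in this sketch is an assumption:

    run_mysql() {
        local command=$1 uri=$2
        # locate the client pod (selector and jsonpath as recorded above)
        local client_pod
        client_pod=$(kubectl get pods --selector=name=pxc-client \
            -o 'jsonpath={.items[].metadata.name}')
        # hypothetical exec: the log only shows the helper entering 'set +o xtrace' here
        kubectl exec "$client_pod" -- bash -c "printf '%s\n' \"$command\" | mysql -sN $uri"
    }

    # usage, as invoked in the log:
    run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' \
        '-h some-name-proxysql -uroot -proot_password -P3306'
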
++ break ++ cat /tmp/tmp.7fuf7JVdUX ++ cat /tmp/tmp.XpI2osIQGp ++ rm /tmp/tmp.7fuf7JVdUX /tmp/tmp.XpI2osIQGp ++ return 0 + client_pod=pxc-client-64b479df95-6298g + wait_pod pxc-client-64b479df95-6298g + local pod=pxc-client-64b479df95-6298g + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-6298g ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-6298g condition met pxc-client-64b479df95-6298g.Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NFfffOhcY4 +++ mktemp ++ local LAST_ERR=/tmp/tmp.3MVIYFlLgL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NFfffOhcY4 ++ cat /tmp/tmp.3MVIYFlLgL ++ rm /tmp/tmp.NFfffOhcY4 /tmp/tmp.3MVIYFlLgL ++ return 0 + client_pod=pxc-client-64b479df95-6298g + wait_pod pxc-client-64b479df95-6298g + local pod=pxc-client-64b479df95-6298g + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-6298g ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-6298g condition met pxc-client-64b479df95-6298g.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.nwQ5KJ49HI/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/compare/select-1.sql /tmp/tmp.nwQ5KJ49HI/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Zb0Yr2jVXD +++ mktemp ++ local LAST_ERR=/tmp/tmp.4wGoKzR8FD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Zb0Yr2jVXD ++ cat /tmp/tmp.4wGoKzR8FD ++ rm /tmp/tmp.Zb0Yr2jVXD /tmp/tmp.4wGoKzR8FD ++ return 0 + client_pod=pxc-client-64b479df95-6298g + wait_pod pxc-client-64b479df95-6298g + local pod=pxc-client-64b479df95-6298g + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-6298g ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-6298g condition met pxc-client-64b479df95-6298g.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.nwQ5KJ49HI/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/compare/select-1.sql /tmp/tmp.nwQ5KJ49HI/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yKTirFxP1j +++ mktemp ++ local LAST_ERR=/tmp/tmp.KdLl662NM0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yKTirFxP1j ++ cat /tmp/tmp.KdLl662NM0 ++ rm /tmp/tmp.yKTirFxP1j /tmp/tmp.KdLl662NM0 ++ return 0 + client_pod=pxc-client-64b479df95-6298g + wait_pod pxc-client-64b479df95-6298g + local pod=pxc-client-64b479df95-6298g + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-6298g ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-6298g condition met pxc-client-64b479df95-6298g.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.nwQ5KJ49HI/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/compare/select-1.sql /tmp/tmp.nwQ5KJ49HI/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hDMnmCPMJd +++ mktemp ++ local LAST_ERR=/tmp/tmp.zYyUCgRymB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hDMnmCPMJd ++ cat /tmp/tmp.zYyUCgRymB Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.hDMnmCPMJd /tmp/tmp.zYyUCgRymB ++ return 0 + '[' '' ']' + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.dmRZYEb91G ++ mktemp + local LAST_ERR=/tmp/tmp.K2l1XCNZAJ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dmRZYEb91G secret/my-cluster-secrets patched + cat /tmp/tmp.K2l1XCNZAJ + rm /tmp/tmp.dmRZYEb91G /tmp/tmp.K2l1XCNZAJ + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.oAsUpzNUlu +++ mktemp ++ local LAST_ERR=/tmp/tmp.UQeIfkWkIS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.oAsUpzNUlu ++ cat /tmp/tmp.UQeIfkWkIS ++ rm /tmp/tmp.oAsUpzNUlu /tmp/tmp.UQeIfkWkIS ++ return 0 + client_pod=pxc-client-64b479df95-6298g + wait_pod pxc-client-64b479df95-6298g + local pod=pxc-client-64b479df95-6298g + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-6298g ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-6298g condition met pxc-client-64b479df95-6298g.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.nwQ5KJ49HI/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/compare/select-4.sql /tmp/tmp.nwQ5KJ49HI/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.ggnEwszaNW ++ mktemp + local LAST_ERR=/tmp/tmp.oxViQ3ezKn + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ggnEwszaNW perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.oxViQ3ezKn + rm /tmp/tmp.ggnEwszaNW /tmp/tmp.oxViQ3ezKn + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EExd0WFgdE +++ mktemp ++ local LAST_ERR=/tmp/tmp.jd0VwbwUa3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EExd0WFgdE ++ cat /tmp/tmp.jd0VwbwUa3 ++ rm /tmp/tmp.EExd0WFgdE /tmp/tmp.jd0VwbwUa3 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.wAnYGYrp9m +++ mktemp ++ local LAST_ERR=/tmp/tmp.TQG739jzyO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.wAnYGYrp9m ++ cat /tmp/tmp.TQG739jzyO ++ rm /tmp/tmp.wAnYGYrp9m /tmp/tmp.TQG739jzyO ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.AmDa9lCQ0p ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.3APH8x26AW +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.AmDa9lCQ0p +++++ cat /tmp/tmp.3APH8x26AW +++++ rm /tmp/tmp.AmDa9lCQ0p /tmp/tmp.3APH8x26AW +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.48zLGkXz9a ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.9IyR138YyZ +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.48zLGkXz9a +++++ cat /tmp/tmp.9IyR138YyZ +++++ rm /tmp/tmp.48zLGkXz9a /tmp/tmp.9IyR138YyZ +++++ return 0 ++++ [[ true == 
\t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ay9VhDw8WL +++ mktemp ++ local LAST_ERR=/tmp/tmp.iIqhjIkwUj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ay9VhDw8WL ++ cat /tmp/tmp.iIqhjIkwUj ++ rm /tmp/tmp.ay9VhDw8WL /tmp/tmp.iIqhjIkwUj ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.zBl1PRbYxp ++ mktemp + local LAST_ERR=/tmp/tmp.H5ZW7BQ5Ki + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.zBl1PRbYxp secret/my-cluster-secrets patched + cat /tmp/tmp.H5ZW7BQ5Ki + rm /tmp/tmp.zBl1PRbYxp /tmp/tmp.H5ZW7BQ5Ki + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RWjSHxz8p3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.pF42b63vcr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RWjSHxz8p3 ++ cat /tmp/tmp.pF42b63vcr ++ rm /tmp/tmp.RWjSHxz8p3 /tmp/tmp.pF42b63vcr ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aCAgdst2LU +++ mktemp ++ local LAST_ERR=/tmp/tmp.TjakDvPoKH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aCAgdst2LU ++ cat /tmp/tmp.TjakDvPoKH ++ rm /tmp/tmp.aCAgdst2LU /tmp/tmp.TjakDvPoKH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BkUUAcvE6v +++ mktemp ++ local LAST_ERR=/tmp/tmp.eh9DYfwVHy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BkUUAcvE6v ++ cat /tmp/tmp.eh9DYfwVHy ++ rm /tmp/tmp.BkUUAcvE6v /tmp/tmp.eh9DYfwVHy ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xlUZuEI1SC +++ mktemp ++ 
local LAST_ERR=/tmp/tmp.sGtQWD9gt2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xlUZuEI1SC ++ cat /tmp/tmp.sGtQWD9gt2 ++ rm /tmp/tmp.xlUZuEI1SC /tmp/tmp.sGtQWD9gt2 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.A4jfWgkzJ8 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.gBm31frMow +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.A4jfWgkzJ8 +++++ cat /tmp/tmp.gBm31frMow +++++ rm /tmp/tmp.A4jfWgkzJ8 /tmp/tmp.gBm31frMow +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.8u5bYZXUv2 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.BJNXID1WNx +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.8u5bYZXUv2 +++++ cat /tmp/tmp.BJNXID1WNx +++++ rm /tmp/tmp.8u5bYZXUv2 /tmp/tmp.BJNXID1WNx +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.w0VctYO9uz +++ mktemp ++ local LAST_ERR=/tmp/tmp.OIQM5BbPIh ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.w0VctYO9uz ++ cat /tmp/tmp.OIQM5BbPIh ++ rm /tmp/tmp.w0VctYO9uz /tmp/tmp.OIQM5BbPIh ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.nwQ5KJ49HI/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/compare/select-2.sql /tmp/tmp.nwQ5KJ49HI/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.nwQ5KJ49HI/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/compare/select-2.sql /tmp/tmp.nwQ5KJ49HI/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.nwQ5KJ49HI/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/compare/select-2.sql /tmp/tmp.nwQ5KJ49HI/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.epEu6UOcuN ++ mktemp + local LAST_ERR=/tmp/tmp.y2oSmvCRFo + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.epEu6UOcuN perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.y2oSmvCRFo + rm /tmp/tmp.epEu6UOcuN /tmp/tmp.y2oSmvCRFo + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.d47gUaKZ3z ++ mktemp + local LAST_ERR=/tmp/tmp.jPKCTAgJlp + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.d47gUaKZ3z secret/my-cluster-secrets patched + cat /tmp/tmp.jPKCTAgJlp + rm /tmp/tmp.d47gUaKZ3z /tmp/tmp.jPKCTAgJlp + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.j8p3zKZG5a +++ mktemp ++ local LAST_ERR=/tmp/tmp.cO2AgiUCDg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.j8p3zKZG5a ++ cat /tmp/tmp.cO2AgiUCDg ++ rm /tmp/tmp.j8p3zKZG5a /tmp/tmp.cO2AgiUCDg ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KRQ70RWIDk +++ mktemp ++ local LAST_ERR=/tmp/tmp.6ESojEKBrQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KRQ70RWIDk ++ cat /tmp/tmp.6ESojEKBrQ ++ rm /tmp/tmp.KRQ70RWIDk /tmp/tmp.6ESojEKBrQ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eSuIZMTwHF +++ mktemp ++ local LAST_ERR=/tmp/tmp.XWpbzTpoCS ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 
'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eSuIZMTwHF ++ cat /tmp/tmp.XWpbzTpoCS ++ rm /tmp/tmp.eSuIZMTwHF /tmp/tmp.XWpbzTpoCS ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WskYx4kxcY +++ mktemp ++ local LAST_ERR=/tmp/tmp.dSCSjY0vrV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WskYx4kxcY ++ cat /tmp/tmp.dSCSjY0vrV ++ rm /tmp/tmp.WskYx4kxcY /tmp/tmp.dSCSjY0vrV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.n12FXIbvPn +++ mktemp ++ local LAST_ERR=/tmp/tmp.vBXU4gDcA6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.n12FXIbvPn ++ cat /tmp/tmp.vBXU4gDcA6 ++ rm /tmp/tmp.n12FXIbvPn /tmp/tmp.vBXU4gDcA6 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7AAR0hatIV +++ mktemp ++ local LAST_ERR=/tmp/tmp.Kbq7xUnPnH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7AAR0hatIV ++ cat /tmp/tmp.Kbq7xUnPnH ++ rm /tmp/tmp.7AAR0hatIV /tmp/tmp.Kbq7xUnPnH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.muGslGuM0V +++ mktemp ++ local LAST_ERR=/tmp/tmp.EKmRGxvPrX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.muGslGuM0V ++ cat /tmp/tmp.EKmRGxvPrX ++ rm /tmp/tmp.muGslGuM0V /tmp/tmp.EKmRGxvPrX ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.67RB3Ct1Ac +++ mktemp ++ local LAST_ERR=/tmp/tmp.jSlQFJIjzr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.67RB3Ct1Ac ++ cat /tmp/tmp.jSlQFJIjzr ++ rm /tmp/tmp.67RB3Ct1Ac /tmp/tmp.jSlQFJIjzr ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5qx6hOCoLr +++ mktemp ++ local LAST_ERR=/tmp/tmp.2D9tlhyaW5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e 
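wait_cluster_consistency, whose iterations fill this part of the log, polls the custom resource status until the state is "ready" and the reported pxc/proxysql sizes match the expected ones. A sketch assembled from the values visible in the trace (i=0, max=36, sleep 7, sleep 20, the jsonpath queries); the haproxy-vs-proxysql engine detection shown in the trace is simplified away here:

    wait_cluster_consistency() {
        local cluster_name=$1 cluster_size=$2 proxy_size=$3
        local i=0 max=36
        sleep 7                                   # initial settle, per the trace
        while true; do
            if [[ $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.state}') == "ready" &&
                  $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.pxc.ready}') == "$cluster_size" &&
                  $(kubectl get pxc "$cluster_name" -o 'jsonpath={.status.proxysql.ready}') == "$proxy_size" ]]; then
                return 0
            fi
            echo 'waiting for cluster readyness'  # message spelled as in the log
            sleep 20
            if [[ $i -ge $max ]]; then            # ~12 min worst case: 36 iterations x 20s
                return 1
            fi
            let i+=1
        done
    }
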
++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5qx6hOCoLr ++ cat /tmp/tmp.2D9tlhyaW5 ++ rm /tmp/tmp.5qx6hOCoLr /tmp/tmp.2D9tlhyaW5 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.BVJC9ORCOT ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.EwSMKDFJS6 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.BVJC9ORCOT +++++ cat /tmp/tmp.EwSMKDFJS6 +++++ rm /tmp/tmp.BVJC9ORCOT /tmp/tmp.EwSMKDFJS6 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.FeIdZ6AKsP ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.69QpVB97Rp +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.FeIdZ6AKsP +++++ cat /tmp/tmp.69QpVB97Rp +++++ rm /tmp/tmp.FeIdZ6AKsP /tmp/tmp.69QpVB97Rp +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QUtbtq37TL +++ mktemp ++ local LAST_ERR=/tmp/tmp.SAVueWsEwQ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QUtbtq37TL ++ cat /tmp/tmp.SAVueWsEwQ ++ rm /tmp/tmp.QUtbtq37TL /tmp/tmp.SAVueWsEwQ ++ return 0 + [[ 2 == \2 ]] + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' 
-s /tmp/tmp.nwQ5KJ49HI/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/compare/select-3.sql /tmp/tmp.nwQ5KJ49HI/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.KfUX3LyK59 ++ mktemp + local LAST_ERR=/tmp/tmp.qAKzhgVzUc + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.KfUX3LyK59 secret/my-cluster-secrets patched + cat /tmp/tmp.qAKzhgVzUc + rm /tmp/tmp.KfUX3LyK59 /tmp/tmp.qAKzhgVzUc + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.2MpBEf1iCr +++ mktemp ++ local LAST_ERR=/tmp/tmp.juL0YZ80by ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2MpBEf1iCr ++ cat /tmp/tmp.juL0YZ80by ++ rm /tmp/tmp.2MpBEf1iCr /tmp/tmp.juL0YZ80by ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + echo 'Skipping: the dual password feature doesn'\''t work for 5.7; PXC 5.7 doesn'\''t support it!' Skipping: the dual password feature doesn't work for 5.7; PXC 5.7 doesn't support it!
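patch_secret and getSecretData in the trace above are thin wrappers around kubectl: one writes a base64 value under a single key of the Secret, the other reads a key back and decodes it (Secret .data values are always base64-encoded). A minimal equivalent of both calls, with the wrapper plumbing removed (secret, key, and value taken from the trace):

    # Store a base64-encoded password under the "monitor" key of the Secret.
    kubectl patch secret my-cluster-secrets -p '{"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}'
    # Read the "root" key back and decode it to plaintext.
    kubectl get secret my-cluster-secrets --template='{{.data.root}}' | base64 --decode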
+ return + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Hz2IedVNfI +++ mktemp ++ local LAST_ERR=/tmp/tmp.hdBCOqgIF1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Hz2IedVNfI ++ cat /tmp/tmp.hdBCOqgIF1 ++ rm /tmp/tmp.Hz2IedVNfI /tmp/tmp.hdBCOqgIF1 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7cAcbiWhWh +++ mktemp ++ local LAST_ERR=/tmp/tmp.lN4cUokvqW ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7cAcbiWhWh ++ cat /tmp/tmp.lN4cUokvqW ++ rm /tmp/tmp.7cAcbiWhWh /tmp/tmp.lN4cUokvqW ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8wK0nX1kMv +++ mktemp ++ local LAST_ERR=/tmp/tmp.ak1zsCSdPM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8wK0nX1kMv ++ cat /tmp/tmp.ak1zsCSdPM ++ rm /tmp/tmp.8wK0nX1kMv /tmp/tmp.ak1zsCSdPM ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gLPZI4QIAo +++ mktemp ++ local LAST_ERR=/tmp/tmp.gWHeOce32f ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gLPZI4QIAo ++ cat /tmp/tmp.gWHeOce32f ++ rm /tmp/tmp.gLPZI4QIAo /tmp/tmp.gWHeOce32f ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.H9rfdOK9q0 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.1GNMHf5C38 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.H9rfdOK9q0 +++++ cat /tmp/tmp.1GNMHf5C38 +++++ rm /tmp/tmp.H9rfdOK9q0 /tmp/tmp.1GNMHf5C38 +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.hRkwE40650 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.TcnfYM1fqe +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get 
pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.hRkwE40650 +++++ cat /tmp/tmp.TcnfYM1fqe +++++ rm /tmp/tmp.hRkwE40650 /tmp/tmp.TcnfYM1fqe +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Z5Tzq7rMGA +++ mktemp ++ local LAST_ERR=/tmp/tmp.C31nrrp6lT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Z5Tzq7rMGA ++ cat /tmp/tmp.C31nrrp6lT ++ rm /tmp/tmp.Z5Tzq7rMGA /tmp/tmp.C31nrrp6lT ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.R1RjhBUb3H +++ mktemp ++ local LAST_ERR=/tmp/tmp.c3N0hpn9oD ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.R1RjhBUb3H ++ cat /tmp/tmp.c3N0hpn9oD ++ rm /tmp/tmp.R1RjhBUb3H /tmp/tmp.c3N0hpn9oD ++ return 0 + client_pod=pxc-client-64b479df95-6298g + wait_pod pxc-client-64b479df95-6298g + local pod=pxc-client-64b479df95-6298g + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-6298g ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-6298g condition met pxc-client-64b479df95-6298g.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.nwQ5KJ49HI/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/compare/select-4.sql /tmp/tmp.nwQ5KJ49HI/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.cb5vK06RYM ++ mktemp + local LAST_ERR=/tmp/tmp.gxMidiJj7u + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.cb5vK06RYM secret/my-cluster-secrets patched + cat /tmp/tmp.gxMidiJj7u + rm /tmp/tmp.cb5vK06RYM /tmp/tmp.gxMidiJj7u + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cHkNvtPUHX +++ mktemp ++ local LAST_ERR=/tmp/tmp.kg9j7rbZuM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cHkNvtPUHX ++ cat /tmp/tmp.kg9j7rbZuM ++ rm /tmp/tmp.cHkNvtPUHX /tmp/tmp.kg9j7rbZuM ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aBCbrZke50 +++ mktemp ++ local LAST_ERR=/tmp/tmp.YrLL9OCrtO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aBCbrZke50 ++ cat /tmp/tmp.YrLL9OCrtO ++ rm /tmp/tmp.aBCbrZke50 /tmp/tmp.YrLL9OCrtO ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.tRIqUIOoJX +++ mktemp ++ local LAST_ERR=/tmp/tmp.ENDljYZS17 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.tRIqUIOoJX ++ cat /tmp/tmp.ENDljYZS17 ++ rm /tmp/tmp.tRIqUIOoJX /tmp/tmp.ENDljYZS17 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.62uSNG0wOh ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.Ua8MMzMz1j +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ 
exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.62uSNG0wOh +++++ cat /tmp/tmp.Ua8MMzMz1j +++++ rm /tmp/tmp.62uSNG0wOh /tmp/tmp.Ua8MMzMz1j +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.iEy0jKuOvo ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.ZulAdpHeG6 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.iEy0jKuOvo +++++ cat /tmp/tmp.ZulAdpHeG6 +++++ rm /tmp/tmp.iEy0jKuOvo /tmp/tmp.ZulAdpHeG6 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SfZpd7B50I +++ mktemp ++ local LAST_ERR=/tmp/tmp.OJcvmlBQON ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SfZpd7B50I ++ cat /tmp/tmp.OJcvmlBQON ++ rm /tmp/tmp.SfZpd7B50I /tmp/tmp.OJcvmlBQON ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ejvoDQ9fu7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.48ihoewq34 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ejvoDQ9fu7 ++ cat /tmp/tmp.48ihoewq34 ++ rm /tmp/tmp.ejvoDQ9fu7 /tmp/tmp.48ihoewq34 ++ return 0 + client_pod=pxc-client-64b479df95-6298g + wait_pod pxc-client-64b479df95-6298g + local pod=pxc-client-64b479df95-6298g + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-6298g ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-6298g condition met pxc-client-64b479df95-6298g.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.nwQ5KJ49HI/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/compare/select-4.sql /tmp/tmp.nwQ5KJ49HI/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.t6CdUexLoA ++ mktemp + local LAST_ERR=/tmp/tmp.CKHIvdMkvh + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.t6CdUexLoA perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.CKHIvdMkvh + rm /tmp/tmp.t6CdUexLoA /tmp/tmp.CKHIvdMkvh + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.D01UgcGE5k +++ mktemp ++ local LAST_ERR=/tmp/tmp.GnYwPiyK6a ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.D01UgcGE5k ++ cat /tmp/tmp.GnYwPiyK6a ++ rm /tmp/tmp.D01UgcGE5k /tmp/tmp.GnYwPiyK6a ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5B6K9bRI5v +++ mktemp ++ local LAST_ERR=/tmp/tmp.PVPbrUG2hF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5B6K9bRI5v ++ cat /tmp/tmp.PVPbrUG2hF ++ rm /tmp/tmp.5B6K9bRI5v /tmp/tmp.PVPbrUG2hF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kbyL2t8ACW +++ mktemp ++ local LAST_ERR=/tmp/tmp.mAYxszUypw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kbyL2t8ACW ++ cat /tmp/tmp.mAYxszUypw ++ rm /tmp/tmp.kbyL2t8ACW /tmp/tmp.mAYxszUypw ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.c4gUKOJIBB +++ mktemp ++ local LAST_ERR=/tmp/tmp.X0I8VpCgvr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break 
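The 'change secret name' step at the start of this block re-points the cluster at a different Secret object with a single merge patch; the operator then re-reconciles every system user from the new object, which is why another long readiness wait follows. The patch in isolation, as recorded in the trace:

    # Point the cluster at a different secrets object; triggers a full user reconcile.
    kubectl patch pxc some-name --type merge \
        --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}'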
++ cat /tmp/tmp.c4gUKOJIBB ++ cat /tmp/tmp.X0I8VpCgvr ++ rm /tmp/tmp.c4gUKOJIBB /tmp/tmp.X0I8VpCgvr ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uHHbWTCcyX +++ mktemp ++ local LAST_ERR=/tmp/tmp.uSIHFXwpvj ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uHHbWTCcyX ++ cat /tmp/tmp.uSIHFXwpvj ++ rm /tmp/tmp.uHHbWTCcyX /tmp/tmp.uSIHFXwpvj ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.i876kQ19ne +++ mktemp ++ local LAST_ERR=/tmp/tmp.6snGEcBoJq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.i876kQ19ne ++ cat /tmp/tmp.6snGEcBoJq ++ rm /tmp/tmp.i876kQ19ne /tmp/tmp.6snGEcBoJq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lnDGMSIOYt +++ mktemp ++ local LAST_ERR=/tmp/tmp.NfVayhADC0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lnDGMSIOYt ++ cat /tmp/tmp.NfVayhADC0 ++ rm /tmp/tmp.lnDGMSIOYt /tmp/tmp.NfVayhADC0 ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vKT7Cdi1Th +++ mktemp ++ local LAST_ERR=/tmp/tmp.FiYj91DJYX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vKT7Cdi1Th ++ cat /tmp/tmp.FiYj91DJYX ++ rm /tmp/tmp.vKT7Cdi1Th /tmp/tmp.FiYj91DJYX ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.PEIRPJKBKe ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.1uvx2Nhrvp +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.PEIRPJKBKe +++++ cat /tmp/tmp.1uvx2Nhrvp +++++ rm /tmp/tmp.PEIRPJKBKe /tmp/tmp.1uvx2Nhrvp +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.A933CKrp8z ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.J9TEUxOM8L +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.A933CKrp8z +++++ cat 
/tmp/tmp.J9TEUxOM8L +++++ rm /tmp/tmp.A933CKrp8z /tmp/tmp.J9TEUxOM8L +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.fDnyxTY26d +++ mktemp ++ local LAST_ERR=/tmp/tmp.qHlmQRO1X3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.fDnyxTY26d ++ cat /tmp/tmp.qHlmQRO1X3 ++ rm /tmp/tmp.fDnyxTY26d /tmp/tmp.qHlmQRO1X3 ++ return 0 + [[ 2 == \2 ]] + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.dBFapdSUIe ++ mktemp + local LAST_ERR=/tmp/tmp.v7uoynMKtX + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dBFapdSUIe secret/my-cluster-secrets-2 patched + cat /tmp/tmp.v7uoynMKtX + rm /tmp/tmp.dBFapdSUIe /tmp/tmp.v7uoynMKtX + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.pFdccmKp5U +++ mktemp ++ local LAST_ERR=/tmp/tmp.dcfD9nqACT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.pFdccmKp5U ++ cat /tmp/tmp.dcfD9nqACT ++ rm /tmp/tmp.pFdccmKp5U /tmp/tmp.dcfD9nqACT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iRD2ayFHAf +++ mktemp ++ local LAST_ERR=/tmp/tmp.nFlMmbEOoi ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iRD2ayFHAf ++ cat /tmp/tmp.nFlMmbEOoi ++ rm /tmp/tmp.iRD2ayFHAf /tmp/tmp.nFlMmbEOoi ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.CGnAvHU8Pt +++ mktemp ++ local LAST_ERR=/tmp/tmp.eoLgkGNTsE ++ local exit_status=0 
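The 'test new operator' step above is the whole rotation recipe: base64-encode the plaintext, then patch it under the user's key in the new Secret. Condensed into a small sketch (patch_secret mirrors the log's helper of the same name; the retry loop and output capture are omitted):

    # Store a new password under the given key of a Secret.
    patch_secret() {
        local secret=$1 key=$2 value=$3
        kubectl patch secret "$secret" -p '{"data":{"'"$key"'": "'"$value"'"}}'
    }
    newpass=test-password2
    newpassencrypted=$(echo -n "$newpass" | base64)
    patch_secret my-cluster-secrets-2 operator "$newpassencrypted"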
+++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.CGnAvHU8Pt ++ cat /tmp/tmp.eoLgkGNTsE ++ rm /tmp/tmp.CGnAvHU8Pt /tmp/tmp.eoLgkGNTsE ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.poch53Lyjw +++ mktemp ++ local LAST_ERR=/tmp/tmp.yiqjxPemmB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.poch53Lyjw ++ cat /tmp/tmp.yiqjxPemmB ++ rm /tmp/tmp.poch53Lyjw /tmp/tmp.yiqjxPemmB ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.3TbXjky9Gn ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.OtDpx6vVst +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.3TbXjky9Gn +++++ cat /tmp/tmp.OtDpx6vVst +++++ rm /tmp/tmp.3TbXjky9Gn /tmp/tmp.OtDpx6vVst +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.7VfhcsSYPE ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.XEuYvEL5I5 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.7VfhcsSYPE +++++ cat /tmp/tmp.XEuYvEL5I5 +++++ rm /tmp/tmp.7VfhcsSYPE /tmp/tmp.XEuYvEL5I5 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.aB8fVslZNj +++ mktemp ++ local LAST_ERR=/tmp/tmp.yu7MzXUNQw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.aB8fVslZNj ++ cat /tmp/tmp.yu7MzXUNQw ++ rm /tmp/tmp.aB8fVslZNj /tmp/tmp.yu7MzXUNQw ++ return 0 + [[ 2 == \2 ]] + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hu5QfrTgz2 +++ mktemp ++ local LAST_ERR=/tmp/tmp.2PEoOHylPB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods 
--selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hu5QfrTgz2 ++ cat /tmp/tmp.2PEoOHylPB ++ rm /tmp/tmp.hu5QfrTgz2 /tmp/tmp.2PEoOHylPB ++ return 0 + client_pod=pxc-client-64b479df95-6298g + wait_pod pxc-client-64b479df95-6298g + local pod=pxc-client-64b479df95-6298g + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-6298g ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-6298g condition met pxc-client-64b479df95-6298g.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.nwQ5KJ49HI/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/compare/select-4.sql /tmp/tmp.nwQ5KJ49HI/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.ikQzzqJ9wr +++ mktemp ++ local LAST_ERR=/tmp/tmp.8ZKLHsVkHl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ikQzzqJ9wr ++ cat /tmp/tmp.8ZKLHsVkHl ++ rm /tmp/tmp.ikQzzqJ9wr /tmp/tmp.8ZKLHsVkHl ++ return 0 + newpass='8q6q,2T.eKqH}u^J-E^' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''8q6q,2T.eKqH}u^J-E^'\'';' '-h some-name-pxc -uroot -p'\''8q6q,2T.eKqH}u^J-E^'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''8q6q,2T.eKqH}u^J-E^'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''8q6q,2T.eKqH}u^J-E^'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lFKbZOzjcu +++ mktemp ++ local LAST_ERR=/tmp/tmp.5y0qxdxV0J ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lFKbZOzjcu ++ cat /tmp/tmp.5y0qxdxV0J ++ rm /tmp/tmp.lFKbZOzjcu /tmp/tmp.5y0qxdxV0J ++ return 0 + client_pod=pxc-client-64b479df95-6298g + wait_pod pxc-client-64b479df95-6298g + local pod=pxc-client-64b479df95-6298g + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-6298g ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-6298g condition met pxc-client-64b479df95-6298g.Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''8q6q,2T.eKqH}u^J-E^'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''8q6q,2T.eKqH}u^J-E^'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''8q6q,2T.eKqH}u^J-E^'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql 
-utestsync -p'\''8q6q,2T.eKqH}u^J-E^'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.eimgJ1IdK0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.4npvXsRhBH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.eimgJ1IdK0 ++ cat /tmp/tmp.4npvXsRhBH ++ rm /tmp/tmp.eimgJ1IdK0 /tmp/tmp.4npvXsRhBH ++ return 0 + client_pod=pxc-client-64b479df95-6298g + wait_pod pxc-client-64b479df95-6298g + local pod=pxc-client-64b479df95-6298g + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-6298g ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-6298g condition met pxc-client-64b479df95-6298g.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.nwQ5KJ49HI/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/compare/select-4.sql /tmp/tmp.nwQ5KJ49HI/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.upGEdfXoGl +++ mktemp ++ local LAST_ERR=/tmp/tmp.lxKALAJBww ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.upGEdfXoGl ++ cat /tmp/tmp.lxKALAJBww ++ rm /tmp/tmp.upGEdfXoGl /tmp/tmp.lxKALAJBww ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.O8Xxx9EGwb ++ mktemp + local LAST_ERR=/tmp/tmp.1MH441Z4Ky + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.O8Xxx9EGwb secret/my-cluster-secrets-2 configured + cat /tmp/tmp.1MH441Z4Ky Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
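The Warning printed by kubectl apply above is benign here: my-cluster-secrets-2 was first created without the kubectl.kubernetes.io/last-applied-configuration annotation that apply needs for its three-way merge, and kubectl patches the annotation in automatically, exactly as the message says. Creating the object with --save-config in the first place records that annotation and avoids the warning, e.g.:

    # --save-config stores the applied configuration in the object's annotation,
    # so later kubectl apply calls can do a clean three-way merge without warning.
    kubectl create -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/conf/secrets.yml --save-config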
+ rm /tmp/tmp.O8Xxx9EGwb /tmp/tmp.1MH441Z4Ky + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZZaam67QrZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.0eyfnLwd90 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZZaam67QrZ ++ cat /tmp/tmp.0eyfnLwd90 ++ rm /tmp/tmp.ZZaam67QrZ /tmp/tmp.0eyfnLwd90 ++ return 0 + client_pod=pxc-client-64b479df95-6298g + wait_pod pxc-client-64b479df95-6298g + local pod=pxc-client-64b479df95-6298g + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-6298g ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-6298g condition met pxc-client-64b479df95-6298g.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.nwQ5KJ49HI/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/compare/select-4.sql /tmp/tmp.nwQ5KJ49HI/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/conf/some-name.yml + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1750-de45ff75#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + local LAST_OUT=/tmp/tmp.vTh7Gpsu6c ++ mktemp + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + local LAST_ERR=/tmp/tmp.8Qp5uvNmyM + local exit_status=0 + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-9787~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.vTh7Gpsu6c 
perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.8Qp5uvNmyM + rm /tmp/tmp.vTh7Gpsu6c /tmp/tmp.8Qp5uvNmyM + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.K77WhIpqAN +++ mktemp ++ local LAST_ERR=/tmp/tmp.0sUPLdFp6g ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.K77WhIpqAN ++ cat /tmp/tmp.0sUPLdFp6g ++ rm /tmp/tmp.K77WhIpqAN /tmp/tmp.0sUPLdFp6g ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9C9GkOnyJx +++ mktemp ++ local LAST_ERR=/tmp/tmp.8e4XqIIsrP ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9C9GkOnyJx ++ cat /tmp/tmp.8e4XqIIsrP ++ rm /tmp/tmp.9C9GkOnyJx /tmp/tmp.8e4XqIIsrP ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.F3eNd1CABG +++ mktemp ++ local LAST_ERR=/tmp/tmp.xdTAWVYsFn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.F3eNd1CABG ++ cat /tmp/tmp.xdTAWVYsFn ++ rm /tmp/tmp.F3eNd1CABG /tmp/tmp.xdTAWVYsFn ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yzb72qn3pW +++ mktemp ++ local LAST_ERR=/tmp/tmp.UUdZK1mkTf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yzb72qn3pW ++ cat /tmp/tmp.UUdZK1mkTf ++ rm /tmp/tmp.yzb72qn3pW /tmp/tmp.UUdZK1mkTf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.73HdL6ImW7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.y2UELFWrzo ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.73HdL6ImW7 ++ cat /tmp/tmp.y2UELFWrzo ++ rm /tmp/tmp.73HdL6ImW7 /tmp/tmp.y2UELFWrzo ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 
]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uknO4cEXig +++ mktemp ++ local LAST_ERR=/tmp/tmp.EkHAik3rif ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uknO4cEXig ++ cat /tmp/tmp.EkHAik3rif ++ rm /tmp/tmp.uknO4cEXig /tmp/tmp.EkHAik3rif ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vvXjokGX32 +++ mktemp ++ local LAST_ERR=/tmp/tmp.VHdZ5fHsky ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vvXjokGX32 ++ cat /tmp/tmp.VHdZ5fHsky ++ rm /tmp/tmp.vvXjokGX32 /tmp/tmp.VHdZ5fHsky ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rDk5B53e54 +++ mktemp ++ local LAST_ERR=/tmp/tmp.GOssmh8Whr ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rDk5B53e54 ++ cat /tmp/tmp.GOssmh8Whr ++ rm /tmp/tmp.rDk5B53e54 /tmp/tmp.GOssmh8Whr ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 7 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.gBQFLjXW14 +++ mktemp ++ local LAST_ERR=/tmp/tmp.DB7BjMlOMl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.gBQFLjXW14 ++ cat /tmp/tmp.DB7BjMlOMl ++ rm /tmp/tmp.gBQFLjXW14 /tmp/tmp.DB7BjMlOMl ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 8 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Or23hmZAGD +++ mktemp ++ local LAST_ERR=/tmp/tmp.RfLAApH7g8 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Or23hmZAGD ++ cat /tmp/tmp.RfLAApH7g8 ++ rm /tmp/tmp.Or23hmZAGD /tmp/tmp.RfLAApH7g8 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 9 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.nDeQUd5oCn +++ mktemp ++ local LAST_ERR=/tmp/tmp.aW5aO5sru4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.nDeQUd5oCn ++ cat /tmp/tmp.aW5aO5sru4 ++ rm /tmp/tmp.nDeQUd5oCn /tmp/tmp.aW5aO5sru4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' 
waiting for cluster readyness + sleep 20 + [[ 10 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QJ8f6p0RzV +++ mktemp ++ local LAST_ERR=/tmp/tmp.toPVc1t08v ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QJ8f6p0RzV ++ cat /tmp/tmp.toPVc1t08v ++ rm /tmp/tmp.QJ8f6p0RzV /tmp/tmp.toPVc1t08v ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 11 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OEGzisy3ro +++ mktemp ++ local LAST_ERR=/tmp/tmp.KeZc1JWCSu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OEGzisy3ro ++ cat /tmp/tmp.KeZc1JWCSu ++ rm /tmp/tmp.OEGzisy3ro /tmp/tmp.KeZc1JWCSu ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 12 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YfnRE14q6Y +++ mktemp ++ local LAST_ERR=/tmp/tmp.soilaXQY7d ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YfnRE14q6Y ++ cat /tmp/tmp.soilaXQY7d ++ rm /tmp/tmp.YfnRE14q6Y /tmp/tmp.soilaXQY7d ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 13 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LFZvKq6XqU +++ mktemp ++ local LAST_ERR=/tmp/tmp.KaQkDNeDAV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LFZvKq6XqU ++ cat /tmp/tmp.KaQkDNeDAV ++ rm /tmp/tmp.LFZvKq6XqU /tmp/tmp.KaQkDNeDAV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 14 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vDfS4CV7ko +++ mktemp ++ local LAST_ERR=/tmp/tmp.izNAN39hkA ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vDfS4CV7ko ++ cat /tmp/tmp.izNAN39hkA ++ rm /tmp/tmp.vDfS4CV7ko /tmp/tmp.izNAN39hkA ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.m4yr9Odvta +++ mktemp ++ local LAST_ERR=/tmp/tmp.Y1qOFKJWRw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.m4yr9Odvta ++ cat /tmp/tmp.Y1qOFKJWRw ++ rm /tmp/tmp.m4yr9Odvta /tmp/tmp.Y1qOFKJWRw ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local 
target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.vSHE5J9AWi ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.8IiCWkt3oe +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.vSHE5J9AWi +++++ cat /tmp/tmp.8IiCWkt3oe +++++ rm /tmp/tmp.vSHE5J9AWi /tmp/tmp.8IiCWkt3oe +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lJmA83QbjG +++ mktemp ++ local LAST_ERR=/tmp/tmp.XdKWxTt9EI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lJmA83QbjG ++ cat /tmp/tmp.XdKWxTt9EI ++ rm /tmp/tmp.lJmA83QbjG /tmp/tmp.XdKWxTt9EI ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.l3F51nvAck ++ mktemp + local LAST_ERR=/tmp/tmp.ULVKcgnCxt + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.l3F51nvAck secret/my-cluster-secrets patched + cat /tmp/tmp.ULVKcgnCxt + rm /tmp/tmp.l3F51nvAck /tmp/tmp.ULVKcgnCxt + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.yQ1NoJyq9p +++ mktemp ++ local LAST_ERR=/tmp/tmp.EEFaxIxqkB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.yQ1NoJyq9p ++ cat /tmp/tmp.EEFaxIxqkB ++ rm /tmp/tmp.yQ1NoJyq9p /tmp/tmp.EEFaxIxqkB ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zP4AbLBwXs +++ mktemp ++ local LAST_ERR=/tmp/tmp.QpCtepIbmV ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zP4AbLBwXs ++ cat /tmp/tmp.QpCtepIbmV ++ rm /tmp/tmp.zP4AbLBwXs /tmp/tmp.QpCtepIbmV ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ 
mktemp ++ local LAST_OUT=/tmp/tmp.smIzElqJXZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.9wTVlyll4m ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.smIzElqJXZ ++ cat /tmp/tmp.9wTVlyll4m ++ rm /tmp/tmp.smIzElqJXZ /tmp/tmp.9wTVlyll4m ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.i0R7Vkd5VN +++ mktemp ++ local LAST_ERR=/tmp/tmp.Fzaemitz8r ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.i0R7Vkd5VN ++ cat /tmp/tmp.Fzaemitz8r ++ rm /tmp/tmp.i0R7Vkd5VN /tmp/tmp.Fzaemitz8r ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7WqXTqYnDF +++ mktemp ++ local LAST_ERR=/tmp/tmp.UJwQxlxyBG ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7WqXTqYnDF ++ cat /tmp/tmp.UJwQxlxyBG ++ rm /tmp/tmp.7WqXTqYnDF /tmp/tmp.UJwQxlxyBG ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.Eiuo6YhgI0 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.tkjL3J4JsE +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.Eiuo6YhgI0 +++++ cat /tmp/tmp.tkjL3J4JsE +++++ rm /tmp/tmp.Eiuo6YhgI0 /tmp/tmp.tkjL3J4JsE +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TI3vg8C8lX +++ mktemp ++ local LAST_ERR=/tmp/tmp.MGbwUr6kgg ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TI3vg8C8lX ++ cat /tmp/tmp.MGbwUr6kgg ++ rm /tmp/tmp.TI3vg8C8lX /tmp/tmp.MGbwUr6kgg ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local 
LAST_OUT=/tmp/tmp.cpPkd1lhcs +++ mktemp ++ local LAST_ERR=/tmp/tmp.JetE5n8KKw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cpPkd1lhcs ++ cat /tmp/tmp.JetE5n8KKw ++ rm /tmp/tmp.cpPkd1lhcs /tmp/tmp.JetE5n8KKw ++ return 0 + client_pod=pxc-client-64b479df95-6298g + wait_pod pxc-client-64b479df95-6298g + local pod=pxc-client-64b479df95-6298g + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-6298g ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-6298g condition met pxc-client-64b479df95-6298g.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.nwQ5KJ49HI/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1750/e2e-tests/users/compare/select-3.sql /tmp/tmp.nwQ5KJ49HI/select-3.sql + destroy users-9787 + local namespace=users-9787 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info + grep -v 'the object has been modified' + grep -v 'get backup status: Job.batch' ++ get_operator_pod + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' ++ local label_prefix=app.kubernetes.io/ + sort -u + tee /tmp/tmp.nwQ5KJ49HI/operator.log +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.2LNP9Uq8yo +++ mktemp ++ local LAST_ERR=/tmp/tmp.7Y2kTWk7LX ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2LNP9Uq8yo ++ cat /tmp/tmp.7Y2kTWk7LX ++ rm /tmp/tmp.2LNP9Uq8yo /tmp/tmp.7Y2kTWk7LX ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-7c5cdd5dd9-mtsvq ++ mktemp + local LAST_OUT=/tmp/tmp.pCvje1wwvg ++ mktemp + local LAST_ERR=/tmp/tmp.cTJMKKnief + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-7c5cdd5dd9-mtsvq + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.pCvje1wwvg + cat /tmp/tmp.cTJMKKnief + rm /tmp/tmp.pCvje1wwvg /tmp/tmp.cTJMKKnief + return 0 2024-07-05T09:29:47.037Z INFO setup Manager starting up {"gitCommit": "de45ff75d55a19b5bd0fa01ec5862ada3c8f0243", "gitBranch": "PR-1750-de45ff75", "buildTime": "2024-07-05T07:35:25Z", "goVersion": "go1.22.5", "os": "linux", "arch": "amd64"} 2024-07-05T09:29:47.037Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.26.15-gke.1469001"} 2024-07-05T09:29:47.038Z INFO setup Registering Components. 
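Nearly every kubectl call in this trace goes through the kubectl_bin wrapper: stdout and stderr are captured into mktemp files, the command is retried up to three times, and the loop breaks on the first zero exit status. A minimal sketch of that wrapper, reconstructed from the xtrace alone (the real e2e helper may differ, for instance in its back-off):

# Sketch of the kubectl_bin retry wrapper seen throughout this trace.
# Reconstructed from the xtrace; internals of the real helper are assumed.
kubectl_bin() {
    local LAST_OUT LAST_ERR exit_status=0 i
    LAST_OUT=$(mktemp)
    LAST_ERR=$(mktemp)
    for i in $(seq 0 2); do
        set +e
        kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
        exit_status=$?
        set -e
        [ "$exit_status" != 0 ] || break   # success: stop retrying
        sleep 1                            # back-off interval is an assumption
    done
    cat "$LAST_OUT"
    cat "$LAST_ERR" >&2
    rm -f "$LAST_OUT" "$LAST_ERR"
    return "$exit_status"
}

This is why each call in the log is bracketed by two mktemp lines, a pair of cat lines, and an rm.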
2024-07-05T09:29:49.632Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2024-07-05T09:29:49.636Z INFO controller-runtime.certwatcher Updated current TLS certificate 2024-07-05T09:29:49.636Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2024-07-05T09:29:49.636Z INFO controller-runtime.metrics Starting metrics server 2024-07-05T09:29:49.636Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2024-07-05T09:29:49.636Z INFO controller-runtime.webhook Starting webhook server 2024-07-05T09:29:49.636Z INFO setup Starting the Cmd. 2024-07-05T09:29:49.636Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2024-07-05T09:29:49.637Z INFO controller-runtime.certwatcher Starting certificate watcher 2024-07-05T09:29:49.737Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 2024-07-05T09:29:49.755Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2024-07-05T09:29:49.756Z DEBUG events percona-xtradb-cluster-operator-7c5cdd5dd9-mtsvq_5a05f623-9059-49d3-b910-772167c4aa97 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"df610fa5-c7cc-47d0-8935-17be446320be","apiVersion":"coordination.k8s.io/v1","resourceVersion":"61924"}, "reason": "LeaderElection"} 2024-07-05T09:29:49.756Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2024-07-05T09:29:49.756Z INFO Starting Controller {"controller": "pxc-controller"} 2024-07-05T09:29:49.756Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2024-07-05T09:29:49.756Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: *v1.PerconaXtraDBClusterBackup"} 2024-07-05T09:29:49.756Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: *v1.PerconaXtraDBCluster"} 2024-07-05T09:29:49.756Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: *v1.PerconaXtraDBClusterRestore"} 2024-07-05T09:29:49.972Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2024-07-05T09:29:49.972Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2024-07-05T09:29:49.972Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2024-07-05T09:30:18.441Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "cd6cf4d4-01d7-43e1-a54d-73c143e51dbf", "version": "1.15.0"} 2024-07-05T09:31:33.723Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "3715e34d-53b5-43fc-b9f8-4366afa3d2f9", "user": "operator"} 2024-07-05T09:31:33.759Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "3715e34d-53b5-43fc-b9f8-4366afa3d2f9", "user": "monitor"} 2024-07-05T09:31:33.804Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "3715e34d-53b5-43fc-b9f8-4366afa3d2f9"} 2024-07-05T09:31:33.857Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "3715e34d-53b5-43fc-b9f8-4366afa3d2f9", "user": "xtrabackup"} 2024-07-05T09:31:33.917Z INFO User xtrabackup: granted privileges 
{"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "3715e34d-53b5-43fc-b9f8-4366afa3d2f9"} 2024-07-05T09:31:34.030Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "3715e34d-53b5-43fc-b9f8-4366afa3d2f9", "err": "get primary pxc pod: not found"} 2024-07-05T09:31:38.795Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "cb2aa4fb-eeac-4b2d-87b9-6a9b4fe2f46e", "err": "get primary pxc pod: not found"} 2024-07-05T09:31:44.046Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "ba4b964b-7c09-4ecf-ba6e-256aea1fed33", "err": "get primary pxc pod: not found"} 2024-07-05T09:31:49.245Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "056f56a7-ca41-4627-9627-f93b9f985e14", "err": "get primary pxc pod: not found"} 2024-07-05T09:34:01.619Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "f53b26e5-b5d7-460e-9ae8-2a26be434350", "user": "root"} 2024-07-05T09:34:01.665Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "f53b26e5-b5d7-460e-9ae8-2a26be434350", "user": "replication"} 2024-07-05T09:34:01.878Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "f53b26e5-b5d7-460e-9ae8-2a26be434350", "new version": "5.7.44-48-57"} 2024-07-05T09:34:05.540Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "f53b26e5-b5d7-460e-9ae8-2a26be434350"} 2024-07-05T09:34:10.031Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "5f7365a9-5fb7-4895-b815-69a07f8e99d3"} 2024-07-05T09:34:15.403Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "4cdd9c54-f1a1-4142-ae85-ab7d55bd608c"} 2024-07-05T09:34:20.942Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "6f1d5f5f-b43a-4d7d-b80a-add0b73bbcb6"} 2024-07-05T09:34:26.394Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "01ce8c5b-4e54-4d91-b12e-0a75471b0682"} 2024-07-05T09:34:31.513Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "dd7d4690-af8b-41a0-97aa-fbe91befac21"} 2024-07-05T09:34:36.698Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "12fe5262-f41d-4995-8e6d-9a5b56762a48"} 2024-07-05T09:34:42.004Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "f60e2ce7-207f-4403-b42f-16cd1f4c6a0b"} 2024-07-05T09:34:47.339Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "02585257-1000-4331-a391-fadc9a98f3e0"} 
2024-07-05T09:34:52.504Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "3d21ad96-fd63-4c80-b13e-01c9c5ee1884"} 2024-07-05T09:34:57.700Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "693acff2-96a0-4034-a7a2-4e2b8a6a38f8"} 2024-07-05T09:35:03.021Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "479404e2-bcb8-440e-9621-4ec25b9ce5ee"} 2024-07-05T09:35:08.215Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "e07552c0-2e70-45b5-92b5-6f16b8e3967f"} 2024-07-05T09:35:13.713Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "5194eda6-2d1e-4d96-8547-be28c0329275"} 2024-07-05T09:35:15.735Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "5bbd7fe7-572c-43d1-90a6-dbe4aed7f9b4", "user": "root"} 2024-07-05T09:35:15.774Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "5bbd7fe7-572c-43d1-90a6-dbe4aed7f9b4", "user": "root"} 2024-07-05T09:35:15.793Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "5bbd7fe7-572c-43d1-90a6-dbe4aed7f9b4", "secret": "some-name-mysql-init", "user": "root"} 2024-07-05T09:35:21.028Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "5bbd7fe7-572c-43d1-90a6-dbe4aed7f9b4"} 2024-07-05T09:35:21.040Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "5bbd7fe7-572c-43d1-90a6-dbe4aed7f9b4", "user": "root"} 2024-07-05T09:35:24.694Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "5bbd7fe7-572c-43d1-90a6-dbe4aed7f9b4"} 2024-07-05T09:35:29.941Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "5e0d5816-6a85-40b2-a93d-1796bcf74bad"} 2024-07-05T09:35:34.811Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "3bf4c6c0-62f7-49c3-8c43-79dc2b5a4ca1", "error": "exec syncusers: pod some-name-proxysql-2 does not have a host assigned / / ", "errorVerbose": "exec syncusers: pod some-name-proxysql-2 does not have a host assigned / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-05T09:35:57.524Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "553dc597-7ace-46b8-8e85-fb040f871c1b", "error": "exec syncusers: command 
terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-05T09:35:58.277Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "da6004bb-2ee7-4b82-a5f9-870783f427f8", "user": "proxyadmin"} 2024-07-05T09:35:58.277Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "da6004bb-2ee7-4b82-a5f9-870783f427f8", "user": "proxyadmin"} 2024-07-05T09:35:58.349Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "da6004bb-2ee7-4b82-a5f9-870783f427f8", "user": "proxyadmin"} 2024-07-05T09:35:58.359Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "da6004bb-2ee7-4b82-a5f9-870783f427f8", "user": "proxyadmin"} 2024-07-05T09:35:58.359Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "da6004bb-2ee7-4b82-a5f9-870783f427f8", "last-applied-secret": "1a55867d0f4fa0d0643653b2eb57b2f690315d8d843a10599c9839c2a779a95f"} 2024-07-05T09:35:58.561Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "da6004bb-2ee7-4b82-a5f9-870783f427f8", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-05T09:36:57.549Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "ca6cf41d-fad2-4827-811e-54eb306739bc"} 2024-07-05T09:37:08.140Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "6a7e72fc-c691-4e88-81d5-ca65c451c2d9"} 2024-07-05T09:37:12.301Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "4a788ee8-05c3-4d3a-9cc7-27afcde737ff", "user": "xtrabackup"} 2024-07-05T09:37:12.328Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "4a788ee8-05c3-4d3a-9cc7-27afcde737ff", "user": "xtrabackup"} 2024-07-05T09:37:12.338Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "4a788ee8-05c3-4d3a-9cc7-27afcde737ff", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-07-05T09:37:12.351Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "4a788ee8-05c3-4d3a-9cc7-27afcde737ff", "user": "xtrabackup"} 2024-07-05T09:37:12.351Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "4a788ee8-05c3-4d3a-9cc7-27afcde737ff", "last-applied-secret": "d11bcc70ca53ea410cd455e13d82e7225917b0861d9b677ed34c02c5f6fbaf1b"} 2024-07-05T09:37:14.070Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "ba451803-4ef7-42ba-90d0-df6b2c7fe4ed"} 2024-07-05T09:38:56.942Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "93ee3ac0-d0e7-487f-ba9a-80b74da5aabc", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-9787 on 10.128.0.10:53: no such host"} 2024-07-05T09:39:47.611Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "2ef3ce2f-5715-431c-b479-5a866387558b"} 2024-07-05T09:39:52.610Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "423b1aa7-e60b-424a-b717-6c2d34a0b3b8"} 2024-07-05T09:39:57.902Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "dcbfdc0e-d613-4b82-bfd0-e2b0e9ddb904"} 2024-07-05T09:40:03.141Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "9272e838-eed9-4ed4-8339-8d8fa34ef1a3"} 
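After each credential change the test re-enters wait_cluster_consistency, the polling loop traced at the start of this section: read .status.state every 20 seconds for at most 36 attempts, then verify the ready replica counts. The "reconcile replication error" and "no such host" records around here appear while pods are being restarted and resolve once the cluster settles, which is why the loop simply keeps polling. Condensed into a standalone sketch with the same field paths and limits:

# Poll the PXC custom resource until the operator reports "ready",
# mirroring wait_cluster_consistency above (36 attempts x 20s), then
# confirm the expected replica counts.
cluster=some-name
i=0
max=36
until [ "$(kubectl get pxc "$cluster" -o 'jsonpath={.status.state}')" = ready ]; do
    echo 'waiting for cluster readiness'
    sleep 20
    i=$((i + 1))
    [ "$i" -lt "$max" ] || { echo 'cluster never became ready' >&2; exit 1; }
done
kubectl get pxc "$cluster" -o 'jsonpath={.status.pxc.ready}'       # expect 3
kubectl get pxc "$cluster" -o 'jsonpath={.status.haproxy.ready}'   # expect 3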
2024-07-05T09:40:04.885Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "7b279116-13e9-4a4e-b3f0-cb5674cddaff", "user": "monitor"} 2024-07-05T09:40:04.914Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "7b279116-13e9-4a4e-b3f0-cb5674cddaff", "user": "monitor"} 2024-07-05T09:40:04.922Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "7b279116-13e9-4a4e-b3f0-cb5674cddaff", "secret": "some-name-mysql-init", "user": "monitor"} 2024-07-05T09:40:04.971Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "7b279116-13e9-4a4e-b3f0-cb5674cddaff", "user": "monitor"} 2024-07-05T09:40:04.984Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "7b279116-13e9-4a4e-b3f0-cb5674cddaff", "user": "monitor"} 2024-07-05T09:40:04.985Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "7b279116-13e9-4a4e-b3f0-cb5674cddaff", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"} 2024-07-05T09:40:07.826Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "7b279116-13e9-4a4e-b3f0-cb5674cddaff", "error": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-05T09:40:49.468Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "3b69f865-8b85-4fea-b890-ad810c5739a3"} 2024-07-05T09:40:54.038Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "3a0535d8-6cab-41e9-97b4-56a4d39759cb"} 2024-07-05T09:40:59.480Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "467ec9eb-3c4b-4d4d-a91e-145790afb483"} 2024-07-05T09:41:05.160Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "52d39225-12d0-4a76-8f53-a2e65a444bd7"} 2024-07-05T09:41:09.927Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "8f5d8421-566e-476c-9e25-67fd4a4e845d"} 2024-07-05T09:41:11.702Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "045ab4a8-a886-45bf-a613-aaae35d4bb49", "user": "operator"} 2024-07-05T09:41:11.729Z INFO User password updated {"controller": 
"pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "045ab4a8-a886-45bf-a613-aaae35d4bb49", "user": "operator"} 2024-07-05T09:41:11.737Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "045ab4a8-a886-45bf-a613-aaae35d4bb49", "secret": "some-name-mysql-init", "user": "operator"} 2024-07-05T09:41:11.749Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "045ab4a8-a886-45bf-a613-aaae35d4bb49", "user": "operator"} 2024-07-05T09:41:11.749Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "045ab4a8-a886-45bf-a613-aaae35d4bb49", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-07-05T09:41:13.050Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "045ab4a8-a886-45bf-a613-aaae35d4bb49", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9787.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9787.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9787.svc.c' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9787.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9787.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9787.svc.c' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-05T09:41:40.304Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "dbe3d0e2-2704-4554-abdd-e2bc31b1d877"} 2024-07-05T09:41:48.924Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "ca1b09fc-64bb-48e9-a3ec-7da089c8be8d"} 2024-07-05T09:41:51.461Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "10e532a6-9d82-4bdb-b222-2e8046b19181", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:1297) : Could not find any nodes belonging to the cluster with writer hostgroup:11\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command 
terminated with exit code 1 / / ERROR (line:1297) : Could not find any nodes belonging to the cluster with writer hostgroup:11\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-05T09:41:59.436Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "913ed41f-3a8c-4a0b-83e4-203c0e05976f"} 2024-07-05T09:42:05.079Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "8153146d-3012-4f1f-9bf9-8c1b505dc153"} 2024-07-05T09:42:10.278Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "886db916-39c1-4816-b82d-1c7f20aca10f"} 2024-07-05T09:42:10.917Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "b637d709-3b18-4ec3-aaa3-ee8e39cb2197", "secrets": "my-cluster-secrets-2"} 2024-07-05T09:42:10.917Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "b637d709-3b18-4ec3-aaa3-ee8e39cb2197", "user": "root"} 2024-07-05T09:42:10.959Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "b637d709-3b18-4ec3-aaa3-ee8e39cb2197", "user": "root"} 2024-07-05T09:42:10.977Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "b637d709-3b18-4ec3-aaa3-ee8e39cb2197", "secret": "some-name-mysql-init", "user": "root"} 2024-07-05T09:42:16.577Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "b637d709-3b18-4ec3-aaa3-ee8e39cb2197"} 2024-07-05T09:42:16.587Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "b637d709-3b18-4ec3-aaa3-ee8e39cb2197", "user": "root"} 2024-07-05T09:42:16.588Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "b637d709-3b18-4ec3-aaa3-ee8e39cb2197", "user": "operator"} 2024-07-05T09:42:16.615Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "b637d709-3b18-4ec3-aaa3-ee8e39cb2197", "user": "operator"} 2024-07-05T09:42:16.625Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "b637d709-3b18-4ec3-aaa3-ee8e39cb2197", "secret": "some-name-mysql-init", "user": "operator"} 2024-07-05T09:42:16.635Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "b637d709-3b18-4ec3-aaa3-ee8e39cb2197", "user": "operator"} 2024-07-05T09:42:16.635Z INFO Password changed, updating user {"controller": 
"pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "b637d709-3b18-4ec3-aaa3-ee8e39cb2197", "user": "monitor"} 2024-07-05T09:42:16.664Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "b637d709-3b18-4ec3-aaa3-ee8e39cb2197", "user": "monitor"} 2024-07-05T09:42:16.672Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "b637d709-3b18-4ec3-aaa3-ee8e39cb2197", "secret": "some-name-mysql-init", "user": "monitor"} 2024-07-05T09:42:16.718Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "b637d709-3b18-4ec3-aaa3-ee8e39cb2197", "user": "monitor"} 2024-07-05T09:42:16.730Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "b637d709-3b18-4ec3-aaa3-ee8e39cb2197", "user": "monitor"} 2024-07-05T09:42:16.730Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "b637d709-3b18-4ec3-aaa3-ee8e39cb2197", "user": "xtrabackup"} 2024-07-05T09:42:16.757Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "b637d709-3b18-4ec3-aaa3-ee8e39cb2197", "user": "xtrabackup"} 2024-07-05T09:42:16.768Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "b637d709-3b18-4ec3-aaa3-ee8e39cb2197", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-07-05T09:42:16.779Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "b637d709-3b18-4ec3-aaa3-ee8e39cb2197", "user": "xtrabackup"} 2024-07-05T09:42:16.779Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "b637d709-3b18-4ec3-aaa3-ee8e39cb2197", "user": "replication"} 2024-07-05T09:42:16.834Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "b637d709-3b18-4ec3-aaa3-ee8e39cb2197", "user": "replication"} 2024-07-05T09:42:16.844Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "b637d709-3b18-4ec3-aaa3-ee8e39cb2197", "secret": "some-name-mysql-init", "user": "replication"} 2024-07-05T09:42:16.853Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "b637d709-3b18-4ec3-aaa3-ee8e39cb2197", "user": "replication"} 2024-07-05T09:42:16.853Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "b637d709-3b18-4ec3-aaa3-ee8e39cb2197", "user": "proxyadmin"} 2024-07-05T09:42:16.900Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "b637d709-3b18-4ec3-aaa3-ee8e39cb2197", "user": "proxyadmin"} 2024-07-05T09:42:16.916Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "b637d709-3b18-4ec3-aaa3-ee8e39cb2197", "user": "proxyadmin"} 2024-07-05T09:42:16.917Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-9787", 
"name": "some-name", "reconcileID": "b637d709-3b18-4ec3-aaa3-ee8e39cb2197", "last-applied-secret": "18a696567d72faac69b694ee5cb3b9ae5edfd7d1c1f4e1bb2e84f4d95a9f0d8a"} 2024-07-05T09:42:16.917Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "b637d709-3b18-4ec3-aaa3-ee8e39cb2197", "last-applied-secret": "18a696567d72faac69b694ee5cb3b9ae5edfd7d1c1f4e1bb2e84f4d95a9f0d8a"} 2024-07-05T09:42:17.221Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "b637d709-3b18-4ec3-aaa3-ee8e39cb2197", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-05T09:44:05.525Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "7db444ef-7f59-4255-af5e-c16aed5ed020", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.85.33.58:33062: connect: connection refused"} 2024-07-05T09:44:11.326Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "a33091d4-47f4-4753-a859-c2af862047d5", "primary name": "some-name-pxc-0.some-name-pxc.users-9787.svc.cluster.local"} 2024-07-05T09:44:11.737Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "b5ce68fa-330c-4144-95bc-42b409e26d49", "primary name": "some-name-pxc-0.some-name-pxc.users-9787.svc.cluster.local"} 2024-07-05T09:44:16.616Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "32c91a20-d4bb-4407-bb16-b353900048c1", "primary name": "some-name-pxc-0.some-name-pxc.users-9787.svc.cluster.local"} 2024-07-05T09:44:21.778Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "0b7ae491-eb74-48db-b15b-5be506447eda", "primary name": "some-name-pxc-0.some-name-pxc.users-9787.svc.cluster.local"} 2024-07-05T09:44:26.994Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "6dd830fb-c8de-4c96-8de2-1fd124a86319", "primary name": "some-name-pxc-0.some-name-pxc.users-9787.svc.cluster.local"} 2024-07-05T09:44:32.589Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "e7cd5330-6927-4a81-a2d1-f90e1ec7f80d", "primary name": "some-name-pxc-0.some-name-pxc.users-9787.svc.cluster.local"} 2024-07-05T09:44:37.953Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "0a274749-2f0f-4a35-a609-8ad93d28bda4", "primary name": "some-name-pxc-0.some-name-pxc.users-9787.svc.cluster.local"} 2024-07-05T09:44:43.120Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "33a2574e-c685-4afa-ad28-868d737b33d0", "primary name": "some-name-pxc-0.some-name-pxc.users-9787.svc.cluster.local"} 2024-07-05T09:44:48.317Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "2f1cd05e-d029-4897-b59c-ab57a1ce6ff8", "primary name": "some-name-pxc-0.some-name-pxc.users-9787.svc.cluster.local"} 2024-07-05T09:44:57.635Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "45e6e0b4-128d-4dcf-86b5-51823120b925"} 2024-07-05T09:44:58.785Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "57403b67-da50-4187-b236-188cadb0acd4", "user": "operator"} 2024-07-05T09:44:58.814Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "57403b67-da50-4187-b236-188cadb0acd4", "user": "operator"} 2024-07-05T09:44:58.829Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "57403b67-da50-4187-b236-188cadb0acd4", "secret": "some-name-mysql-init", "user": "operator"} 2024-07-05T09:44:58.842Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "57403b67-da50-4187-b236-188cadb0acd4", "user": "operator"} 2024-07-05T09:44:58.843Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "57403b67-da50-4187-b236-188cadb0acd4", "last-applied-secret": "795a77576155c3fe8ad676f2baf09b1b9c2f945f326aac1112897ac5cb1204e8"} 2024-07-05T09:45:00.315Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "57403b67-da50-4187-b236-188cadb0acd4", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9787.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9787.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9787.svc.c' 
(using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9787.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9787.svc.c' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-9787.svc.c' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-05T09:45:55.422Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "a3273cd5-fa3e-4fbb-9ae4-d186b0e31155"} 2024-07-05T09:46:03.547Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "2f3b0b41-0ca2-45c7-823a-ef63e925c400"} 2024-07-05T09:46:08.858Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "e3eaa8c6-13ae-4b93-9925-5139a16a752c"} 2024-07-05T09:46:14.438Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "008801e4-466e-4a4e-a8d0-6920a24f32ab"} 2024-07-05T09:46:19.741Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "a5c70c23-6af4-420e-bf3d-59bee1eec60f"} 2024-07-05T09:46:24.973Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "a7edb65e-6948-4f5f-bcd7-9b81c5b71721"} 2024-07-05T09:46:30.260Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "e552bbed-8555-4b8e-a4fe-2b6bcc1f2777"} 2024-07-05T09:46:37.228Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "6183f37e-b84b-4198-9ef9-0c97a57cd65e"} 2024-07-05T09:46:41.140Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "da2d92e1-0cc5-47c9-beb3-504cfc72f655"} 2024-07-05T09:46:46.372Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "403506b1-758e-4d18-891a-458e3d2b5148"} 2024-07-05T09:46:51.748Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "142e96bd-082c-45af-affe-4814e8588559"} 2024-07-05T09:46:57.076Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "38252e2e-ad18-487b-ae8a-efb1f0a7769d"} 
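Verification after each rotation happens the way compare_mysql_cmd did earlier in this section: locate the long-lived pxc-client pod, exec the statement through it, and diff the output against a stored .sql file. The exec step reduces to roughly the following; the mysql invocation is inferred from the trace, so treat the exact flags as an assumption:

# Run a statement through the pxc-client pod, as run_mysql does above.
# Selector and connection URI are taken verbatim from the trace.
client_pod=$(kubectl get pods --selector=name=pxc-client \
    -o 'jsonpath={.items[].metadata.name}')
kubectl exec "$client_pod" -- \
    mysql -sN -h some-name-haproxy -umonitor -p'test-password2' \
    -e 'SHOW DATABASES;'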
2024-07-05T09:47:02.366Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "8d9ba521-67fa-4246-9808-a5d6f9493afd"} 2024-07-05T09:47:07.475Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "6b9a4542-b2b0-486d-bbdd-6a2cd6e37e3f"} 2024-07-05T09:47:13.052Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "89a361c5-3e3b-451a-97f8-80078e3c1539"} 2024-07-05T09:47:18.252Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "8e6039eb-1b57-4b4f-a19a-3274aa61c60d"} 2024-07-05T09:47:20.058Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "81f40594-a299-4711-bb5f-070ec0b13171", "user": "root"} 2024-07-05T09:47:20.095Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "81f40594-a299-4711-bb5f-070ec0b13171", "user": "root"} 2024-07-05T09:47:20.105Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "81f40594-a299-4711-bb5f-070ec0b13171", "secret": "some-name-mysql-init", "user": "root"} 2024-07-05T09:47:25.589Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "81f40594-a299-4711-bb5f-070ec0b13171"} 2024-07-05T09:47:25.604Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "81f40594-a299-4711-bb5f-070ec0b13171", "user": "root"} 2024-07-05T09:47:25.604Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "81f40594-a299-4711-bb5f-070ec0b13171", "user": "monitor"} 2024-07-05T09:47:25.632Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "81f40594-a299-4711-bb5f-070ec0b13171", "user": "monitor"} 2024-07-05T09:47:25.652Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "81f40594-a299-4711-bb5f-070ec0b13171", "secret": "some-name-mysql-init", "user": "monitor"} 2024-07-05T09:47:25.697Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "81f40594-a299-4711-bb5f-070ec0b13171", "user": "monitor"} 2024-07-05T09:47:25.709Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "81f40594-a299-4711-bb5f-070ec0b13171", "user": "monitor"} 2024-07-05T09:47:25.709Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "81f40594-a299-4711-bb5f-070ec0b13171", "user": "xtrabackup"} 2024-07-05T09:47:25.738Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "81f40594-a299-4711-bb5f-070ec0b13171", "user": "xtrabackup"} 2024-07-05T09:47:25.751Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": 
"81f40594-a299-4711-bb5f-070ec0b13171", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-07-05T09:47:25.767Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "81f40594-a299-4711-bb5f-070ec0b13171", "user": "xtrabackup"} 2024-07-05T09:47:25.767Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "81f40594-a299-4711-bb5f-070ec0b13171", "user": "proxyadmin"} 2024-07-05T09:47:25.814Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "81f40594-a299-4711-bb5f-070ec0b13171", "user": "proxyadmin"} 2024-07-05T09:47:25.833Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "81f40594-a299-4711-bb5f-070ec0b13171", "user": "proxyadmin"} 2024-07-05T09:47:25.833Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "81f40594-a299-4711-bb5f-070ec0b13171", "last-applied-secret": "54ca67aabf877e0861e9c11196ba67d0f2418c212d6a6c2a8c89af90c7af4bc2"} 2024-07-05T09:47:25.833Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "81f40594-a299-4711-bb5f-070ec0b13171", "last-applied-secret": "54ca67aabf877e0861e9c11196ba67d0f2418c212d6a6c2a8c89af90c7af4bc2"} 2024-07-05T09:47:26.389Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "81f40594-a299-4711-bb5f-070ec0b13171", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:915\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-07-05T09:49:12.639Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "36369e17-2c73-4114-bc3c-c35e376a3e58", "err": "failed to connect to pod some-name-pxc-0: dial tcp 10.85.33.62:33062: connect: connection refused"} 2024-07-05T09:49:17.849Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "d10e802d-9cf1-4b0e-9258-55dd73194a09", "primary name": "some-name-pxc-0.some-name-pxc.users-9787.svc.cluster.local"} 2024-07-05T09:49:23.034Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "7b756df4-2cf9-41b1-bbad-3f2275eedc56", "primary name": "some-name-pxc-0.some-name-pxc.users-9787.svc.cluster.local"} 2024-07-05T09:49:28.342Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "de6ca566-4bad-4def-98d4-30a91f74d0ce", "primary name": "some-name-pxc-0.some-name-pxc.users-9787.svc.cluster.local"} 2024-07-05T09:49:33.523Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "31ba8cd8-6b21-4403-a0d4-f972d46cd62d", "primary name": "some-name-pxc-0.some-name-pxc.users-9787.svc.cluster.local"} 2024-07-05T09:49:38.744Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "b9f4402f-d4a7-4d5c-b03b-82d4054db46f", "primary name": "some-name-pxc-0.some-name-pxc.users-9787.svc.cluster.local"} 2024-07-05T09:49:43.940Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "e88042c1-d226-471b-a47c-22236ccf2ed2", "primary name": "some-name-pxc-0.some-name-pxc.users-9787.svc.cluster.local"} 2024-07-05T09:49:52.243Z INFO KubeAPIWarningLogger .metadata.ownerReferences contains duplicate entries; API server dedups owner references in 1.20+, and may reject such requests as early as 1.24; please fix your requests; duplicate UID(s) observed: 5eb6bea0-bdea-413c-8960-5945911cdb3a 2024-07-05T09:49:55.613Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "26cf718b-ad5c-4c70-a91b-4a1dcd49de2e", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 10.128.15.105:3306: connect: connection refused"} 2024-07-05T09:52:38.533Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "c36ea7d8-e444-46b9-ba78-2ceca0aee4f0", "user": "root"} 2024-07-05T09:52:38.575Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "c36ea7d8-e444-46b9-ba78-2ceca0aee4f0", "user": "root"} 2024-07-05T09:52:38.585Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "c36ea7d8-e444-46b9-ba78-2ceca0aee4f0", "secret": "some-name-mysql-init", "user": "root"} 2024-07-05T09:52:38.594Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "c36ea7d8-e444-46b9-ba78-2ceca0aee4f0", "user": "root"} 2024-07-05T09:52:38.595Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "c36ea7d8-e444-46b9-ba78-2ceca0aee4f0", "user": "operator"} 
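Rotations that touch every system user in a single reconcile, like the c36ea7d8 burst just above and the earlier b637d709 one that followed "Created user secrets ... my-cluster-secrets-2", typically come from replacing the cluster's secret object (or all of its values) at once rather than patching one key. A hedged sketch of such a secret: the key set matches the users named in this log, the passwords are illustrative placeholders, and the cluster would reference the new object via its spec.secretsName field.

# Replacement secret carrying new passwords for all system users listed
# in the operator log; stringData lets the API server handle base64.
# Password values below are placeholders, not from the test.
kubectl apply -f - <<'EOF'
apiVersion: v1
kind: Secret
metadata:
  name: my-cluster-secrets-2
type: Opaque
stringData:
  root: new-root-pass
  operator: new-operator-pass
  monitor: new-monitor-pass
  xtrabackup: new-xtrabackup-pass
  replication: new-replication-pass
  proxyadmin: new-proxyadmin-pass
EOF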
2024-07-05T09:52:38.617Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "c36ea7d8-e444-46b9-ba78-2ceca0aee4f0", "user": "operator"} 2024-07-05T09:52:38.629Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "c36ea7d8-e444-46b9-ba78-2ceca0aee4f0", "secret": "some-name-mysql-init", "user": "operator"} 2024-07-05T09:52:38.640Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "c36ea7d8-e444-46b9-ba78-2ceca0aee4f0", "user": "operator"} 2024-07-05T09:52:38.640Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "c36ea7d8-e444-46b9-ba78-2ceca0aee4f0", "user": "monitor"} 2024-07-05T09:52:38.661Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "c36ea7d8-e444-46b9-ba78-2ceca0aee4f0", "user": "monitor"} 2024-07-05T09:52:38.671Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "c36ea7d8-e444-46b9-ba78-2ceca0aee4f0", "secret": "some-name-mysql-init", "user": "monitor"} 2024-07-05T09:52:38.681Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "c36ea7d8-e444-46b9-ba78-2ceca0aee4f0", "user": "monitor"} 2024-07-05T09:52:38.681Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "c36ea7d8-e444-46b9-ba78-2ceca0aee4f0", "user": "xtrabackup"} 2024-07-05T09:52:38.704Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "c36ea7d8-e444-46b9-ba78-2ceca0aee4f0", "user": "xtrabackup"} 2024-07-05T09:52:38.716Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "c36ea7d8-e444-46b9-ba78-2ceca0aee4f0", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-07-05T09:52:38.730Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "c36ea7d8-e444-46b9-ba78-2ceca0aee4f0", "user": "xtrabackup"} 2024-07-05T09:52:38.731Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "c36ea7d8-e444-46b9-ba78-2ceca0aee4f0", "user": "replication"} 2024-07-05T09:52:38.753Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "c36ea7d8-e444-46b9-ba78-2ceca0aee4f0", "user": "replication"} 2024-07-05T09:52:38.763Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "c36ea7d8-e444-46b9-ba78-2ceca0aee4f0", "secret": "some-name-mysql-init", "user": "replication"} 2024-07-05T09:52:38.773Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "c36ea7d8-e444-46b9-ba78-2ceca0aee4f0", "user": "replication"} 2024-07-05T09:52:38.773Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "c36ea7d8-e444-46b9-ba78-2ceca0aee4f0", "last-applied-secret": 
"6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-07-05T09:52:38.773Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "c36ea7d8-e444-46b9-ba78-2ceca0aee4f0", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-07-05T09:55:31.992Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "884de135-cf9c-4982-9bef-a0a6f7839d69", "user": "monitor"} 2024-07-05T09:55:32.020Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "884de135-cf9c-4982-9bef-a0a6f7839d69", "user": "monitor"} 2024-07-05T09:55:32.030Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "884de135-cf9c-4982-9bef-a0a6f7839d69", "secret": "some-name-mysql-init", "user": "monitor"} 2024-07-05T09:55:32.042Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "884de135-cf9c-4982-9bef-a0a6f7839d69", "user": "monitor"} 2024-07-05T09:55:32.042Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-9787", "name": "some-name", "reconcileID": "884de135-cf9c-4982-9bef-a0a6f7839d69", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"} github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:222 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:261 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.4/pkg/internal/controller/controller.go:324 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1248 [mysql] 2024/07/05 09:51:56 connection.go:49: read tcp 10.85.33.53:50322->10.128.15.105:3306: i/o timeout [mysql] 2024/07/05 09:52:06 connection.go:49: unexpected EOF sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).processNextWorkItem sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).reconcileHandler sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Start.func2.2 + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-9787 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.2PNOV1tufA ++ mktemp + local LAST_ERR=/tmp/tmp.lkBteGlK63 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.2PNOV1tufA perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.lkBteGlK63 + rm /tmp/tmp.2PNOV1tufA /tmp/tmp.lkBteGlK63 + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.zd5Au6Nx71 ++ mktemp + local LAST_ERR=/tmp/tmp.BtEnAYkcX0 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break 
+ kubectl_bin delete pxc-backup --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.zd5Au6Nx71
++ mktemp
+ local LAST_ERR=/tmp/tmp.BtEnAYkcX0
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-backup --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.zd5Au6Nx71
No resources found
+ cat /tmp/tmp.BtEnAYkcX0
+ rm /tmp/tmp.zd5Au6Nx71 /tmp/tmp.BtEnAYkcX0
+ return 0
+ kubectl_bin delete pxc-restore --all --all-namespaces
++ mktemp
+ local LAST_OUT=/tmp/tmp.YioGLzGpmz
++ mktemp
+ local LAST_ERR=/tmp/tmp.h7N49NM4tx
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete pxc-restore --all --all-namespaces
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.YioGLzGpmz
No resources found
+ cat /tmp/tmp.h7N49NM4tx
+ rm /tmp/tmp.YioGLzGpmz /tmp/tmp.h7N49NM4tx
+ return 0
+ kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
++ mktemp
+ local LAST_OUT=/tmp/tmp.3hI3sbmHvq
++ mktemp
+ local LAST_ERR=/tmp/tmp.27dTM4xfHZ
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.3hI3sbmHvq
validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted
+ cat /tmp/tmp.27dTM4xfHZ
+ rm /tmp/tmp.3hI3sbmHvq /tmp/tmp.27dTM4xfHZ
+ return 0
+ kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml
+ :
+ '[' '!' -z '' ']'
+ '[' -n pxc-operator ']'
+ kubectl_bin delete --grace-period=0 --force=true namespace users-9787
+ rm -rf /tmp/tmp.nwQ5KJ49HI
+ kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator
++ mktemp
++ mktemp
+ desc 'test passed'
+ set +o xtrace
-----------------------------------------------------------------------------------
test passed
-----------------------------------------------------------------------------------
+ local LAST_OUT=/tmp/tmp.STqbl5BAq7
+ local LAST_OUT=/tmp/tmp.ypKMkS1PGj
++ mktemp
++ mktemp
+ local LAST_ERR=/tmp/tmp.qsxmSchxjM
+ local exit_status=0
+ local LAST_ERR=/tmp/tmp.boLcCCy82Y
+ local exit_status=0
++ seq 0 2
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace pxc-operator
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace users-9787
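The two namespace deletions at the end run in parallel, which is why their xtrace output is interleaved. They can complete promptly only because the pxc finalizers were cleared at the start of teardown; generalized from the patch used above (NAMESPACE and NAME are placeholders):

    # Clear finalizers on a stuck pxc resource so 'kubectl delete' is not
    # blocked waiting on finalizer processing.
    kubectl patch pxc -n "$NAMESPACE" "$NAME" --type=merge \
        -p '{"metadata":{"finalizers":[]}}'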