Log: /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/logs/users-5-7.log
WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1
WARNING: version difference between client (1.30) and server (1.26) exceeds the supported minor version skew of +/-1
+ create_infra users-14491 + local ns=users-14491 + '[' -n pxc-operator ']' + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-24459 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.Z0DwDjADWm ++ mktemp + local LAST_ERR=/tmp/tmp.X4W1gkfSLr + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Z0DwDjADWm perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.X4W1gkfSLr + rm /tmp/tmp.Z0DwDjADWm /tmp/tmp.X4W1gkfSLr + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.psfPmw2Cb5 ++ mktemp + local LAST_ERR=/tmp/tmp.ibnsVY0Gf8 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.psfPmw2Cb5 No resources found + cat /tmp/tmp.ibnsVY0Gf8 + rm /tmp/tmp.psfPmw2Cb5 /tmp/tmp.ibnsVY0Gf8 + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.z5CqlMYEgW ++ mktemp + local LAST_ERR=/tmp/tmp.VCeqgreaFV + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.z5CqlMYEgW No resources found + cat /tmp/tmp.VCeqgreaFV + rm /tmp/tmp.z5CqlMYEgW /tmp/tmp.VCeqgreaFV + return 0 + create_namespace pxc-operator + local namespace=pxc-operator + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ awk '{print $1}' ++ grep chaos-mesh ++ kubectl get MutatingWebhookConfiguration + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified 
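[editor's note] The repeated "error: resource(s) were provided, but no name was specified" lines above are benign: destroy_chaos_mesh greps each resource type for chaos-mesh objects, finds none, and pipes an empty name list into kubectl delete; the "+ :" no-op that follows swallows the non-zero exit. The same trace also shows the retry wrapper used for every kubectl call in this log. A minimal sketch of that wrapper, reconstructed from the traced commands (LAST_OUT/LAST_ERR and the 0..2 retry loop appear in the log; the exact function body is an assumption):

    # kubectl_bin (reconstruction): capture stdout/stderr to temp files,
    # retry up to three times, then replay the captured output.
    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            [ "$exit_status" != 0 ] || break   # stop on first success
            sleep 0   # the trace shows a zero-second pause between attempts
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR" >&2
        rm "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }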
+ : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v '^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + '[' -n '' ']' + desc 'cleaned up old namespaces pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces pxc-operator ----------------------------------------------------------------------------------- ++ mktemp + kubectl_bin delete namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.Hh9xoAosJU + local LAST_OUT=/tmp/tmp.dwpZ5yBFze ++ mktemp ++ mktemp + xargs kubectl delete ns + local LAST_ERR=/tmp/tmp.GYOTIbYbdY + local exit_status=0 ++ seq 0 2 + local LAST_ERR=/tmp/tmp.Bmlq2FURQd + local exit_status=0 + for i in '$(seq 0 2)' + set +e ++ seq 0 2 + kubectl get ns + for i in '$(seq 0 2)' + set +e + kubectl delete namespace pxc-operator + awk '{print$1}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Hh9xoAosJU + cat /tmp/tmp.GYOTIbYbdY + rm /tmp/tmp.Hh9xoAosJU /tmp/tmp.GYOTIbYbdY + return 0 namespace "users-24459" deleted Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.dwpZ5yBFze namespace "pxc-operator" deleted + cat /tmp/tmp.Bmlq2FURQd + rm /tmp/tmp.dwpZ5yBFze /tmp/tmp.Bmlq2FURQd + return 0 + wait_for_delete namespace/pxc-operator + local res=namespace/pxc-operator + echo -n 'namespace/pxc-operator - ' namespace/pxc-operator - + set +o xtrace Error from server (NotFound): namespaces "pxc-operator" not found + desc 'create namespace pxc-operator' + set +o xtrace ----------------------------------------------------------------------------------- create namespace pxc-operator ----------------------------------------------------------------------------------- + kubectl_bin create namespace pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.OB6lZMzv7E ++ mktemp + local LAST_ERR=/tmp/tmp.azUcfjILCj + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.OB6lZMzv7E namespace/pxc-operator created + cat /tmp/tmp.azUcfjILCj + rm /tmp/tmp.OB6lZMzv7E /tmp/tmp.azUcfjILCj + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.Vih2w6kSqQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.b3ks2tszhy ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Vih2w6kSqQ ++ cat /tmp/tmp.b3ks2tszhy ++ rm /tmp/tmp.Vih2w6kSqQ /tmp/tmp.b3ks2tszhy ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1730-852bae96-1-cluster8 --namespace=pxc-operator ++ mktemp + local LAST_OUT=/tmp/tmp.jK6yr4lyun ++ mktemp + local LAST_ERR=/tmp/tmp.3k5ALJ5sP8 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1730-852bae96-1-cluster8 --namespace=pxc-operator + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.jK6yr4lyun Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1730-852bae96-1-cluster8" modified. 
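[editor's note] The "+" lines of two helpers interleave above (the temp-file names alternate between /tmp/tmp.Hh9xoAosJU and /tmp/tmp.dwpZ5yBFze) because the stages of the kubectl get ns | egrep | awk | xargs sweep begin tracing at the same time as the direct namespace delete. Untangled, the bootstrap of the operator namespace amounts to the following sequence (names are taken from the log; kubectl wait --for=delete stands in for the test's wait_for_delete polling loop, which is an assumption):

    # Equivalent of the traced create_namespace pxc-operator bootstrap.
    kubectl delete namespace pxc-operator || true                       # may already be gone
    kubectl wait --for=delete namespace/pxc-operator --timeout=120s || true
    kubectl create namespace pxc-operator
    kubectl config set-context \
        gke_cloud-dev-112233_us-central1-a_jen-pxc-1730-852bae96-1-cluster8 \
        --namespace=pxc-operator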
+ cat /tmp/tmp.3k5ALJ5sP8 + rm /tmp/tmp.jK6yr4lyun /tmp/tmp.3k5ALJ5sP8 + return 0 + deploy_operator + desc 'start PXC operator' + set +o xtrace ----------------------------------------------------------------------------------- start PXC operator ----------------------------------------------------------------------------------- + kubectl_bin apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/deploy/crd.yaml ++ mktemp + local LAST_OUT=/tmp/tmp.Cb3BeryPjC ++ mktemp + local LAST_ERR=/tmp/tmp.vZMWqYifkF + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply --server-side --force-conflicts -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/deploy/crd.yaml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Cb3BeryPjC customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterbackups.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusterrestores.pxc.percona.com serverside-applied customresourcedefinition.apiextensions.k8s.io/perconaxtradbclusters.pxc.percona.com serverside-applied + cat /tmp/tmp.vZMWqYifkF + rm /tmp/tmp.Cb3BeryPjC /tmp/tmp.vZMWqYifkF + return 0 + '[' -n pxc-operator ']' + apply_rbac cw-rbac + local operator_namespace=pxc-operator + local rbac=cw-rbac + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/deploy/cw-rbac.yaml + sed -e 's^namespace: .*^namespace: pxc-operator^' + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.USLaWK1FWD ++ mktemp + local LAST_ERR=/tmp/tmp.GQsmuUhghN + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.USLaWK1FWD clusterrole.rbac.authorization.k8s.io/percona-xtradb-cluster-operator unchanged serviceaccount/percona-xtradb-cluster-operator created clusterrolebinding.rbac.authorization.k8s.io/service-account-percona-xtradb-cluster-operator unchanged + cat /tmp/tmp.GQsmuUhghN + rm /tmp/tmp.USLaWK1FWD /tmp/tmp.GQsmuUhghN + return 0 + sed -e 's^image: .*^image: perconalab/percona-xtradb-cluster-operator:PR-1730-852bae96^' + sed -e 's^failureThreshold: .*^failureThreshold: 10^' + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/deploy/cw-operator.yaml + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "DISABLE_TELEMETRY").value) = "true"' - + kubectl_bin apply -f - ++ mktemp + local LAST_OUT=/tmp/tmp.yRRFL8M3p2 ++ mktemp + yq eval '(select(.kind == "Deployment").spec.template.spec.containers[] | select(.name == "percona-xtradb-cluster-operator").env[] | select(.name == "LOG_LEVEL").value) = "DEBUG"' - + local LAST_ERR=/tmp/tmp.np73Gn2PNv + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.yRRFL8M3p2 deployment.apps/percona-xtradb-cluster-operator created service/percona-xtradb-cluster-operator created + cat /tmp/tmp.np73Gn2PNv + rm /tmp/tmp.yRRFL8M3p2 /tmp/tmp.np73Gn2PNv + return 0 + sleep 10 + kubectl_bin wait --for=condition=Ready pods -l app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s ++ mktemp + local LAST_OUT=/tmp/tmp.Ud7KOD5jlK ++ mktemp + local LAST_ERR=/tmp/tmp.4GSdmqr9pg + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pods -l 
app.kubernetes.io/component=operator,app.kubernetes.io/instance=percona-xtradb-cluster-operator,app.kubernetes.io/name=percona-xtradb-cluster-operator --timeout=30s + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Ud7KOD5jlK pod/percona-xtradb-cluster-operator-784d88b77-d7957 condition met + cat /tmp/tmp.4GSdmqr9pg + rm /tmp/tmp.Ud7KOD5jlK /tmp/tmp.4GSdmqr9pg + return 0 ++ get_operator_pod ++ local label_prefix=app.kubernetes.io/ +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.RfD6O7TIrs +++ mktemp ++ local LAST_ERR=/tmp/tmp.1hyp50Mfil ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RfD6O7TIrs ++ cat /tmp/tmp.1hyp50Mfil ++ rm /tmp/tmp.RfD6O7TIrs /tmp/tmp.1hyp50Mfil ++ return 0 + wait_pod percona-xtradb-cluster-operator-784d88b77-d7957 480 pxc-operator + local pod=percona-xtradb-cluster-operator-784d88b77-d7957 + local max_retry=480 + local ns=pxc-operator ++ echo percona-xtradb-cluster-operator-784d88b77-d7957 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/percona-xtradb-cluster-operator-784d88b77-d7957 condition met percona-xtradb-cluster-operator-784d88b77-d7957.Ok + sleep 3 + create_namespace users-14491 + local namespace=users-14491 + local skip_clean_namespace= + [[ 1 == 1 ]] + [[ -z '' ]] + destroy_chaos_mesh ++ helm list --all-namespaces --filter chaos-mesh ++ tail -n1 ++ awk '-F ' '{print $2}' ++ sed s/NAMESPACE// + local chaos_mesh_ns= + '[' -n '' ']' ++ kubectl get MutatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete MutatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl get ValidatingWebhookConfiguration ++ grep validate-auth ++ awk '{print $1}' + timeout 30 kubectl delete ValidatingWebhookConfiguration error: resource(s) were provided, but no name was specified + : ++ kubectl api-resources ++ grep chaos-mesh ++ awk '{print $1}' ++ kubectl get crd ++ grep chaos-mesh.org ++ awk '{print $1}' + timeout 30 kubectl delete crd error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrolebinding ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrolebinding error: resource(s) were provided, but no name was specified + : ++ kubectl get clusterrole ++ grep chaos-mesh ++ awk '{print $1}' + timeout 30 kubectl delete clusterrole error: resource(s) were provided, but no name was specified + : + desc 'cleaned up all old namespaces' + set +o xtrace ----------------------------------------------------------------------------------- cleaned up all old namespaces ----------------------------------------------------------------------------------- + kubectl_bin get ns + egrep -v 
'^kube-|^default$|Terminating|pxc-operator|openshift|^NAME' + '[' -n '' ']' + desc 'cleaned up old namespaces users-14491' + xargs kubectl delete ns + set +o xtrace ----------------------------------------------------------------------------------- cleaned up old namespaces users-14491 ----------------------------------------------------------------------------------- + kubectl_bin delete namespace users-14491 + awk '{print$1}' ++ mktemp ++ mktemp + local LAST_OUT=/tmp/tmp.JVgCtaplxV + local LAST_OUT=/tmp/tmp.du6dWXEWAX ++ mktemp ++ mktemp + local LAST_ERR=/tmp/tmp.c5Q86bjxoA + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-14491 + local LAST_ERR=/tmp/tmp.ImOX5CGTRc + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl get ns + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.JVgCtaplxV + cat /tmp/tmp.ImOX5CGTRc + rm /tmp/tmp.JVgCtaplxV /tmp/tmp.ImOX5CGTRc + return 0 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-14491 Error from server (Forbidden): namespaces "default" is forbidden: this namespace may not be deleted + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl delete namespace users-14491 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.du6dWXEWAX + cat /tmp/tmp.c5Q86bjxoA Error from server (NotFound): namespaces "users-14491" not found + rm /tmp/tmp.du6dWXEWAX /tmp/tmp.c5Q86bjxoA + return 1 + : + wait_for_delete namespace/users-14491 + local res=namespace/users-14491 + echo -n 'namespace/users-14491 - ' namespace/users-14491 - + set +o xtrace Error from server (NotFound): namespaces "users-14491" not found + desc 'create namespace users-14491' + set +o xtrace ----------------------------------------------------------------------------------- create namespace users-14491 ----------------------------------------------------------------------------------- + kubectl_bin create namespace users-14491 ++ mktemp + local LAST_OUT=/tmp/tmp.ThRBtqgtza ++ mktemp + local LAST_ERR=/tmp/tmp.eS35MxmxBf + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl create namespace users-14491 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ThRBtqgtza namespace/users-14491 created + cat /tmp/tmp.eS35MxmxBf + rm /tmp/tmp.ThRBtqgtza /tmp/tmp.eS35MxmxBf + return 0 ++ kubectl_bin config current-context +++ mktemp ++ local LAST_OUT=/tmp/tmp.HwAq9K6rBJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.Hj2POifH9q ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl config current-context ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.HwAq9K6rBJ ++ cat /tmp/tmp.Hj2POifH9q ++ rm /tmp/tmp.HwAq9K6rBJ /tmp/tmp.Hj2POifH9q ++ return 0 + kubectl_bin config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1730-852bae96-1-cluster8 --namespace=users-14491 ++ mktemp + local LAST_OUT=/tmp/tmp.oBqDiWnA4A ++ mktemp + local LAST_ERR=/tmp/tmp.wvnDkGSHPr + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl config set-context gke_cloud-dev-112233_us-central1-a_jen-pxc-1730-852bae96-1-cluster8 --namespace=users-14491 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.oBqDiWnA4A Context "gke_cloud-dev-112233_us-central1-a_jen-pxc-1730-852bae96-1-cluster8" modified. 
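[editor's note] The 'Error from server (Forbidden): namespaces "default" is forbidden' lines above reveal a small filtering bug: the egrep anchor '^default$' can never match, because kubectl get ns rows carry STATUS and AGE columns ("default   Active   ..."), so "default" slips through to xargs kubectl delete ns and is rejected by the API server. A sketch of a filter that matches on the first field instead (an illustrative alternative, not the test's actual code):

    # Filter namespaces by field rather than by whole line; xargs -r stops
    # 'kubectl delete ns' from running with an empty argument list.
    kubectl get ns --no-headers \
        | awk '$2 != "Terminating" && $1 != "default" && $1 != "pxc-operator" && $1 !~ /^kube-/ && $1 !~ /openshift/ {print $1}' \
        | xargs -r kubectl delete ns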
+ cat /tmp/tmp.wvnDkGSHPr + rm /tmp/tmp.oBqDiWnA4A /tmp/tmp.wvnDkGSHPr + return 0 + apply_secrets + desc 'create secrets for cloud storages' + set +o xtrace ----------------------------------------------------------------------------------- create secrets for cloud storages ----------------------------------------------------------------------------------- + '[' -z '' ']' + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/conf/cloud-secret.yml ++ mktemp + local LAST_OUT=/tmp/tmp.gl0ZWs8gix ++ mktemp + local LAST_ERR=/tmp/tmp.m8ikV24kge + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/conf/minio-secret.yml -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/conf/cloud-secret.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.gl0ZWs8gix secret/minio-secret created secret/aws-s3-secret created secret/gcp-cs-secret created secret/azure-secret created + cat /tmp/tmp.m8ikV24kge + rm /tmp/tmp.gl0ZWs8gix /tmp/tmp.m8ikV24kge + return 0 + desc 'create PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create PXC cluster ----------------------------------------------------------------------------------- + newpass=test-password ++ echo -n test-password ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZA== + cluster=some-name + spinup_pxc some-name /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/conf/some-name.yml + local cluster=some-name + local config=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/conf/some-name.yml + local size=3 + local sleep=10 + local secretsFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/conf/secrets.yml + local pxcClientFile=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/conf/client.yml + local port=3306 + desc 'create first PXC cluster' + set +o xtrace ----------------------------------------------------------------------------------- create first PXC cluster ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.ftAu8fbCfo ++ mktemp + local LAST_ERR=/tmp/tmp.vGJUgAZrhk + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ftAu8fbCfo secret/my-cluster-secrets created secret/some-name-ssl created secret/some-name-ssl-internal created + cat /tmp/tmp.vGJUgAZrhk + rm /tmp/tmp.ftAu8fbCfo /tmp/tmp.vGJUgAZrhk + return 0 + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/conf/client.yml + '[' -z '' ']' + kubectl_bin apply -f - + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/conf/client.yml + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/conf/client.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1730-852bae96#' + local LAST_OUT=/tmp/tmp.H0nQbfy511 + /usr/bin/sed -e 
's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' ++ mktemp + local LAST_ERR=/tmp/tmp.z9Z6hjxSn9 + local exit_status=0 + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-14491~ + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' ++ seq 0 2 + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.H0nQbfy511 deployment.apps/pxc-client created + cat /tmp/tmp.z9Z6hjxSn9 + rm /tmp/tmp.H0nQbfy511 /tmp/tmp.z9Z6hjxSn9 + return 0 + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p ]] + [[ some-name == \d\e\m\a\n\d\-\b\a\c\k\u\p\-\c\l\o\u\d ]] + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + local LAST_OUT=/tmp/tmp.ulGd0oKGSi + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' ++ mktemp + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1730-852bae96#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + local LAST_ERR=/tmp/tmp.4REcqMquoA + local exit_status=0 + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-14491~ ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ulGd0oKGSi perconaxtradbcluster.pxc.percona.com/some-name created + cat /tmp/tmp.4REcqMquoA + rm /tmp/tmp.ulGd0oKGSi /tmp/tmp.4REcqMquoA + return 0 + desc 'check if all 3 Pods started' + set +o xtrace ----------------------------------------------------------------------------------- check if all 3 Pods started ----------------------------------------------------------------------------------- ++ get_proxy some-name ++ local target_cluster=some-name +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.6DSmcFmica ++++ mktemp +++ local LAST_ERR=/tmp/tmp.35MGUCf3Ig +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' 
+++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.6DSmcFmica +++ cat /tmp/tmp.35MGUCf3Ig +++ rm /tmp/tmp.6DSmcFmica /tmp/tmp.35MGUCf3Ig +++ return 0 ++ [[ '' == \t\r\u\e ]] +++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++ mktemp +++ local LAST_OUT=/tmp/tmp.2XQmrm1arL ++++ mktemp +++ local LAST_ERR=/tmp/tmp.WAHAhGFI5w +++ local exit_status=0 ++++ seq 0 2 +++ for i in '$(seq 0 2)' +++ set +e +++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++ exit_status=0 +++ set -e +++ '[' 0 '!=' 0 ']' +++ break +++ cat /tmp/tmp.2XQmrm1arL +++ cat /tmp/tmp.WAHAhGFI5w +++ rm /tmp/tmp.2XQmrm1arL /tmp/tmp.WAHAhGFI5w +++ return 0 ++ [[ true == \t\r\u\e ]] ++ echo some-name-proxysql ++ return + local proxy=some-name-proxysql + kubectl_bin wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-14491 ++ mktemp + local LAST_OUT=/tmp/tmp.Krnkdc48oo ++ mktemp + local LAST_ERR=/tmp/tmp.RA07sJhYq1 + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-14491 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-14491 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + for i in '$(seq 0 2)' + set +e + kubectl wait --for=condition=Ready pod -l app.kubernetes.io/instance=monitoring,app.kubernetes.io/managed-by=percona-xtradb-cluster-operator --timeout=300s -n users-14491 + exit_status=1 + set -e + '[' 1 '!=' 0 ']' + '[' 1 == 1 ']' + sleep 0 + cat /tmp/tmp.Krnkdc48oo + cat /tmp/tmp.RA07sJhYq1 error: no matching resources found + rm /tmp/tmp.Krnkdc48oo /tmp/tmp.RA07sJhYq1 + return 1 + true + wait_for_running some-name-proxysql 1 + local name=some-name-proxysql + let last_pod=0 + : + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 0 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-proxysql-0 480 + local pod=some-name-proxysql-0 + local max_retry=480 + local ns= ++ echo some-name-proxysql-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=proxysql + set +o xtrace pod/some-name-proxysql-0 condition met some-name-proxysql-0.Ok + wait_for_running some-name-pxc 3 + local name=some-name-pxc + let last_pod=2 + local max_retry=480 + desc 'wait for running cluster' + set +o xtrace ----------------------------------------------------------------------------------- wait for running cluster ----------------------------------------------------------------------------------- ++ seq 0 2 + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-0 480 + local pod=some-name-pxc-0 + local max_retry=480 + local ns= ++ echo some-name-pxc-0 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-0 condition met some-name-pxc-0.Ok + for i 
in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-1 480 + local pod=some-name-pxc-1 + local max_retry=480 + local ns= ++ echo some-name-pxc-1 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-1 condition met some-name-pxc-1.Ok + for i in '$(seq 0 $last_pod)' + wait_pod some-name-pxc-2 480 + local pod=some-name-pxc-2 + local max_retry=480 + local ns= ++ echo some-name-pxc-2 ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container=pxc + set +o xtrace pod/some-name-pxc-2 condition met some-name-pxc-2.Ok + sleep 10 + desc 'write data' + set +o xtrace ----------------------------------------------------------------------------------- write data ----------------------------------------------------------------------------------- + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QM9LihEytI +++ mktemp ++ local LAST_ERR=/tmp/tmp.F2RLVkOsOF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QM9LihEytI ++ cat /tmp/tmp.F2RLVkOsOF Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.QM9LihEytI /tmp/tmp.F2RLVkOsOF ++ return 0 + [[ -n '' ]] + run_mysql 'CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=CREATE DATABASE IF NOT EXISTS myApp; use myApp; CREATE TABLE IF NOT EXISTS myApp (id int PRIMARY KEY) ;' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.KV4KFTRsW5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.rC9FB0n0Dm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.KV4KFTRsW5 ++ cat /tmp/tmp.rC9FB0n0Dm ++ rm /tmp/tmp.KV4KFTRsW5 /tmp/tmp.rC9FB0n0Dm ++ return 0 + client_pod=pxc-client-64b479df95-4dt7d + wait_pod pxc-client-64b479df95-4dt7d + local pod=pxc-client-64b479df95-4dt7d + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-4dt7d ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-4dt7d condition met pxc-client-64b479df95-4dt7d.Ok + set +o xtrace + run_mysql 'INSERT myApp.myApp (id) VALUES (100500)' '-h some-name-proxysql -uroot -proot_password -P3306' + local 'command=INSERT myApp.myApp (id) VALUES (100500)' + local 'uri=-h some-name-proxysql -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.W3RC2BR9L7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.2zDWBc9Z8D ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' 
++ break ++ cat /tmp/tmp.W3RC2BR9L7 ++ cat /tmp/tmp.2zDWBc9Z8D ++ rm /tmp/tmp.W3RC2BR9L7 /tmp/tmp.2zDWBc9Z8D ++ return 0 + client_pod=pxc-client-64b479df95-4dt7d + wait_pod pxc-client-64b479df95-4dt7d + local pod=pxc-client-64b479df95-4dt7d + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-4dt7d ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-4dt7d condition met pxc-client-64b479df95-4dt7d.Ok + set +o xtrace + sleep 30 ++ seq 0 2 + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-0.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PBJMhs2P8h +++ mktemp ++ local LAST_ERR=/tmp/tmp.gZRYHNa3sm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PBJMhs2P8h ++ cat /tmp/tmp.gZRYHNa3sm ++ rm /tmp/tmp.PBJMhs2P8h /tmp/tmp.gZRYHNa3sm ++ return 0 + client_pod=pxc-client-64b479df95-4dt7d + wait_pod pxc-client-64b479df95-4dt7d + local pod=pxc-client-64b479df95-4dt7d + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-4dt7d ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-4dt7d condition met pxc-client-64b479df95-4dt7d.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.wXQqG5rnGC/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-1.sql /tmp/tmp.wXQqG5rnGC/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-1.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.QRLTSRLlPc +++ mktemp ++ local LAST_ERR=/tmp/tmp.zWT5PHlSOZ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.QRLTSRLlPc ++ cat /tmp/tmp.zWT5PHlSOZ ++ rm /tmp/tmp.QRLTSRLlPc /tmp/tmp.zWT5PHlSOZ ++ return 0 + client_pod=pxc-client-64b479df95-4dt7d + wait_pod pxc-client-64b479df95-4dt7d + local pod=pxc-client-64b479df95-4dt7d + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-4dt7d ++ egrep '^(pxc|proxysql)$' ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' + local container= + set +o xtrace pod/pxc-client-64b479df95-4dt7d condition met pxc-client-64b479df95-4dt7d.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.wXQqG5rnGC/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-1.sql /tmp/tmp.wXQqG5rnGC/select-1.sql + for i in '$(seq 0 $((size - 1)))' + compare_mysql_cmd select-1 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local command_id=select-1 + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-1.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SELECT * from myApp.myApp;' '-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' + local 'command=SELECT * from myApp.myApp;' + local 'uri=-h some-name-pxc-2.some-name-pxc -uroot -proot_password -P3306' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rRMnvnzShW +++ mktemp ++ local LAST_ERR=/tmp/tmp.lxL4ibsfA4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rRMnvnzShW ++ cat /tmp/tmp.lxL4ibsfA4 ++ rm /tmp/tmp.rRMnvnzShW /tmp/tmp.lxL4ibsfA4 ++ return 0 + client_pod=pxc-client-64b479df95-4dt7d + wait_pod pxc-client-64b479df95-4dt7d + local pod=pxc-client-64b479df95-4dt7d + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-4dt7d ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-4dt7d condition met pxc-client-64b479df95-4dt7d.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.wXQqG5rnGC/select-1.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-1.sql /tmp/tmp.wXQqG5rnGC/select-1.sql ++ is_keyring_plugin_in_use some-name ++ local cluster=some-name ++ kubectl_bin exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ egrep -o 'early-plugin-load=keyring_\w+.so' +++ mktemp ++ local LAST_OUT=/tmp/tmp.DZFE8nOc6e +++ mktemp ++ local LAST_ERR=/tmp/tmp.huUqjhy8m5 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl exec -it some-name-pxc-0 -c pxc -- bash -c 'cat /etc/mysql/node.cnf' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.DZFE8nOc6e ++ cat /tmp/tmp.huUqjhy8m5 Unable to use a TTY - input is not a terminal or the right kind of file ++ rm /tmp/tmp.DZFE8nOc6e /tmp/tmp.huUqjhy8m5 ++ return 0 + '[' '' ']' + desc 'test root' + set +o xtrace ----------------------------------------------------------------------------------- test root ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets root dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=root + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.E4FSM1mZgs ++ mktemp + local LAST_ERR=/tmp/tmp.zY0nD5EqiU + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"root": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.E4FSM1mZgs secret/my-cluster-secrets patched + cat /tmp/tmp.zY0nD5EqiU + rm /tmp/tmp.E4FSM1mZgs /tmp/tmp.zY0nD5EqiU + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uroot -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uroot -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.9rKqjjTI2P +++ mktemp ++ local LAST_ERR=/tmp/tmp.k1LuXnEtyM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.9rKqjjTI2P ++ cat /tmp/tmp.k1LuXnEtyM ++ rm /tmp/tmp.9rKqjjTI2P /tmp/tmp.k1LuXnEtyM ++ return 0 + client_pod=pxc-client-64b479df95-4dt7d + wait_pod pxc-client-64b479df95-4dt7d + local pod=pxc-client-64b479df95-4dt7d + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-4dt7d ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-4dt7d condition met pxc-client-64b479df95-4dt7d.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.wXQqG5rnGC/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4.sql /tmp/tmp.wXQqG5rnGC/select-4.sql + desc 'test proxyadmin' + set +o xtrace ----------------------------------------------------------------------------------- test proxyadmin ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.6MIYpIZLTW ++ mktemp + local LAST_ERR=/tmp/tmp.bubKkV8Pzr + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":3}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.6MIYpIZLTW perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.bubKkV8Pzr + rm /tmp/tmp.6MIYpIZLTW /tmp/tmp.bubKkV8Pzr + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.kx0iLKNKtR +++ mktemp ++ local LAST_ERR=/tmp/tmp.OR1wFmQsGL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.kx0iLKNKtR ++ cat /tmp/tmp.OR1wFmQsGL ++ rm /tmp/tmp.kx0iLKNKtR /tmp/tmp.OR1wFmQsGL ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.WhfFc3Hr9l +++ mktemp ++ local LAST_ERR=/tmp/tmp.Aai4sijCC6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.WhfFc3Hr9l ++ cat /tmp/tmp.Aai4sijCC6 ++ rm /tmp/tmp.WhfFc3Hr9l /tmp/tmp.Aai4sijCC6 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.aa5aNlYFYy ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.hKpDap3NEa +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.aa5aNlYFYy +++++ cat /tmp/tmp.hKpDap3NEa +++++ rm /tmp/tmp.aa5aNlYFYy /tmp/tmp.hKpDap3NEa +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.fpGd1BYSbo ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.a6exSkch4o +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.fpGd1BYSbo +++++ cat /tmp/tmp.a6exSkch4o +++++ rm /tmp/tmp.fpGd1BYSbo /tmp/tmp.a6exSkch4o +++++ return 0 ++++ [[ true == 
\t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.iYPC7ubpdm +++ mktemp ++ local LAST_ERR=/tmp/tmp.752shDkdvd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.iYPC7ubpdm ++ cat /tmp/tmp.752shDkdvd ++ rm /tmp/tmp.iYPC7ubpdm /tmp/tmp.752shDkdvd ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets proxyadmin dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=proxyadmin + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.fJ9Mzqluxs ++ mktemp + local LAST_ERR=/tmp/tmp.DqcdrRy6My + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"proxyadmin": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.fJ9Mzqluxs secret/my-cluster-secrets patched + cat /tmp/tmp.DqcdrRy6My + rm /tmp/tmp.fJ9Mzqluxs /tmp/tmp.DqcdrRy6My + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.MlVzqvoOfi +++ mktemp ++ local LAST_ERR=/tmp/tmp.kq9uzubh2D ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.MlVzqvoOfi ++ cat /tmp/tmp.kq9uzubh2D ++ rm /tmp/tmp.MlVzqvoOfi /tmp/tmp.kq9uzubh2D ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BQvT9Yj2R0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.yHjRo1jwuf ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BQvT9Yj2R0 ++ cat /tmp/tmp.yHjRo1jwuf ++ rm /tmp/tmp.BQvT9Yj2R0 /tmp/tmp.yHjRo1jwuf ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.PHobJJIBti +++ mktemp ++ local LAST_ERR=/tmp/tmp.vEr7xyqs0d ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.PHobJJIBti ++ cat /tmp/tmp.vEr7xyqs0d ++ rm /tmp/tmp.PHobJJIBti /tmp/tmp.vEr7xyqs0d ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NfaaEc4Heb +++ mktemp ++ 
local LAST_ERR=/tmp/tmp.eeASwayct2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NfaaEc4Heb ++ cat /tmp/tmp.eeASwayct2 ++ rm /tmp/tmp.NfaaEc4Heb /tmp/tmp.eeASwayct2 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.HX5amQ1syf ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.CSc1GbkIjK +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.HX5amQ1syf +++++ cat /tmp/tmp.CSc1GbkIjK +++++ rm /tmp/tmp.HX5amQ1syf /tmp/tmp.CSc1GbkIjK +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.jbvRsuDfdq ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.2BNbChVaAr +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.jbvRsuDfdq +++++ cat /tmp/tmp.2BNbChVaAr +++++ rm /tmp/tmp.jbvRsuDfdq /tmp/tmp.2BNbChVaAr +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RQ5mYs60Dr +++ mktemp ++ local LAST_ERR=/tmp/tmp.NOygmnitC6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RQ5mYs60Dr ++ cat /tmp/tmp.NOygmnitC6 ++ rm /tmp/tmp.RQ5mYs60Dr /tmp/tmp.NOygmnitC6 ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-0 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-0 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.wXQqG5rnGC/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-2.sql /tmp/tmp.wXQqG5rnGC/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-1 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-1 + local container_name=proxysql + set +o xtrace + '[' '!' -s /tmp/tmp.wXQqG5rnGC/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-2.sql /tmp/tmp.wXQqG5rnGC/select-2.sql + compare_mysql_cmd_local select-2 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 '' proxysql + local command_id=select-2 + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local postfix= + local container_name=proxysql + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-2.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW TABLES;' '-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' some-name-proxysql-2 proxysql + local 'command=SHOW TABLES;' + local 'uri=-h127.0.0.1 -P6032 -uproxyadmin -p'\''test-password'\''' + local pod=some-name-proxysql-2 + local container_name=proxysql + set +o xtrace + '[' '!' 
-s /tmp/tmp.wXQqG5rnGC/select-2.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-2.sql /tmp/tmp.wXQqG5rnGC/select-2.sql + desc 'test xtrabackup' + set +o xtrace ----------------------------------------------------------------------------------- test xtrabackup ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' ++ mktemp + local LAST_OUT=/tmp/tmp.BmXgNP3JRi ++ mktemp + local LAST_ERR=/tmp/tmp.Fe4CfMPWQe + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type=merge '-p={"spec":{"proxysql":{"size":2}}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.BmXgNP3JRi perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.Fe4CfMPWQe + rm /tmp/tmp.BmXgNP3JRi /tmp/tmp.Fe4CfMPWQe + return 0 + patch_secret my-cluster-secrets xtrabackup dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=xtrabackup + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.oH6gek4XHg ++ mktemp + local LAST_ERR=/tmp/tmp.EOXY0H73Xc + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"xtrabackup": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.oH6gek4XHg secret/my-cluster-secrets patched + cat /tmp/tmp.EOXY0H73Xc + rm /tmp/tmp.oH6gek4XHg /tmp/tmp.EOXY0H73Xc + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7zWoMw5VFJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.kBM5aLf5Kb ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7zWoMw5VFJ ++ cat /tmp/tmp.kBM5aLf5Kb ++ rm /tmp/tmp.7zWoMw5VFJ /tmp/tmp.kBM5aLf5Kb ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.YBTdDkCgVX +++ mktemp ++ local LAST_ERR=/tmp/tmp.j4J0V5cAkO ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.YBTdDkCgVX ++ cat /tmp/tmp.j4J0V5cAkO ++ rm /tmp/tmp.YBTdDkCgVX /tmp/tmp.j4J0V5cAkO ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TCOs4EALyH +++ mktemp ++ local LAST_ERR=/tmp/tmp.wDy2ZqjIyl ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 
'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TCOs4EALyH ++ cat /tmp/tmp.wDy2ZqjIyl ++ rm /tmp/tmp.TCOs4EALyH /tmp/tmp.wDy2ZqjIyl ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7cVN5ZfOwR +++ mktemp ++ local LAST_ERR=/tmp/tmp.hBStn5kutY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7cVN5ZfOwR ++ cat /tmp/tmp.hBStn5kutY ++ rm /tmp/tmp.7cVN5ZfOwR /tmp/tmp.hBStn5kutY ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rUra2byLRg +++ mktemp ++ local LAST_ERR=/tmp/tmp.D76H2mkuzd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rUra2byLRg ++ cat /tmp/tmp.D76H2mkuzd ++ rm /tmp/tmp.rUra2byLRg /tmp/tmp.D76H2mkuzd ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.OasFUEWcPl +++ mktemp ++ local LAST_ERR=/tmp/tmp.wT6pU5FRYJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.OasFUEWcPl ++ cat /tmp/tmp.wT6pU5FRYJ ++ rm /tmp/tmp.OasFUEWcPl /tmp/tmp.wT6pU5FRYJ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Uy8R6UQnD3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.zFe6XJLZcp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Uy8R6UQnD3 ++ cat /tmp/tmp.zFe6XJLZcp ++ rm /tmp/tmp.Uy8R6UQnD3 /tmp/tmp.zFe6XJLZcp ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2LaxPAG53U +++ mktemp ++ local LAST_ERR=/tmp/tmp.l70iiCwQ00 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2LaxPAG53U ++ cat /tmp/tmp.l70iiCwQ00 ++ rm /tmp/tmp.2LaxPAG53U /tmp/tmp.l70iiCwQ00 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 7 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.njD87UgV8D +++ mktemp ++ local LAST_ERR=/tmp/tmp.psTwX4XCtA ++ local exit_status=0 +++ seq 0 2 ++ for i in 
'$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.njD87UgV8D ++ cat /tmp/tmp.psTwX4XCtA ++ rm /tmp/tmp.njD87UgV8D /tmp/tmp.psTwX4XCtA ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EUQqMwaDcO +++ mktemp ++ local LAST_ERR=/tmp/tmp.zzXXECqCWm ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EUQqMwaDcO ++ cat /tmp/tmp.zzXXECqCWm ++ rm /tmp/tmp.EUQqMwaDcO /tmp/tmp.zzXXECqCWm ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.PUOWHCazov ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.LurS7lF3RX +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.PUOWHCazov +++++ cat /tmp/tmp.LurS7lF3RX +++++ rm /tmp/tmp.PUOWHCazov /tmp/tmp.LurS7lF3RX +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.KDQDaHXCTb ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.prpX8mxho1 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.KDQDaHXCTb +++++ cat /tmp/tmp.prpX8mxho1 +++++ rm /tmp/tmp.KDQDaHXCTb /tmp/tmp.prpX8mxho1 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.87RFw8v4d7 +++ mktemp ++ local LAST_ERR=/tmp/tmp.7nOd5QHhyp ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.87RFw8v4d7 ++ cat /tmp/tmp.7nOd5QHhyp ++ rm /tmp/tmp.87RFw8v4d7 /tmp/tmp.7nOd5QHhyp ++ return 0 + [[ 2 == \2 ]] + compare_mysql_cmd_local select-3 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 '' pxc + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local postfix= + local container_name=pxc + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql_local 'SHOW DATABASES;' '-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' some-name-pxc-0 pxc + local 'command=SHOW DATABASES;' + local 'uri=-h 127.0.0.1 -uxtrabackup -p'\''test-password'\''' + local pod=some-name-pxc-0 + local container_name=pxc + set +o xtrace + '[' '!' 
-s /tmp/tmp.wXQqG5rnGC/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-3.sql /tmp/tmp.wXQqG5rnGC/select-3.sql + desc 'test monitor' + set +o xtrace ----------------------------------------------------------------------------------- test monitor ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.f1Q98slfCj ++ mktemp + local LAST_ERR=/tmp/tmp.626uy3scvr + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.f1Q98slfCj secret/my-cluster-secrets patched + cat /tmp/tmp.626uy3scvr + rm /tmp/tmp.f1Q98slfCj /tmp/tmp.626uy3scvr + return 0 + wait_for_password_propagation my-cluster-secrets monitor + local secret=my-cluster-secrets + local user=monitor + local max_retry=240 ++ getSecretData my-cluster-secrets root ++ local secretName=my-cluster-secrets ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.1RoqAk20VP +++ mktemp ++ local LAST_ERR=/tmp/tmp.PjGm0S8vaU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1RoqAk20VP ++ cat /tmp/tmp.PjGm0S8vaU ++ rm /tmp/tmp.1RoqAk20VP /tmp/tmp.PjGm0S8vaU ++ return 0 + local root_pass=test-password + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 5\.7 ]] + echo 'Skipping dual password feature doesn'\''t work for 5.7. PXC 5.7 doesn'\''t support it!' Skipping dual password feature doesn't work for 5.7. PXC 5.7 doesn't support it! 
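The trace above repeats two helper patterns that recur through the rest of this log: every kubectl call is wrapped in a retry helper that produces the LAST_OUT/LAST_ERR temp-file boilerplate, and each password rotation goes through patch_secret with a base64-encoded value, followed by wait_cluster_consistency polling .status.state. A minimal sketch of these helpers, reconstructed from the xtrace output; the function names appear in the trace, while I/O redirections, the retry delay, and the timeout branch are not visible in an xtrace and are assumptions:

    # Retry wrapper behind the repeated LAST_OUT/LAST_ERR blocks. Redirecting
    # stdout/stderr into the temp files is assumed; xtrace does not show redirections.
    kubectl_bin() {
        local LAST_OUT LAST_ERR exit_status=0
        LAST_OUT=$(mktemp)
        LAST_ERR=$(mktemp)
        for i in $(seq 0 2); do                  # up to three attempts
            set +e
            kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
            exit_status=$?
            set -e
            [ "$exit_status" != 0 ] && continue  # assumption: retry on failure, possibly after a sleep
            break
        done
        cat "$LAST_OUT"
        cat "$LAST_ERR" >&2                      # assumption: captured errors are replayed to stderr
        rm "$LAST_OUT" "$LAST_ERR"
        return "$exit_status"
    }

    # patch_secret as seen in the trace: the value must already be base64-encoded,
    # because Secret .data fields hold base64 strings.
    patch_secret() {
        local secret=$1 key=$2 value=$3
        kubectl_bin patch secret "$secret" "-p={\"data\":{\"$key\": \"$value\"}}"
    }
    # e.g. patch_secret my-cluster-secrets xtrabackup "$(echo -n test-password | base64)"
    #      (dGVzdC1wYXNzd29yZA== is base64 for "test-password")

    # Readiness poll driving the repeated "waiting for cluster readyness" lines:
    # check .status.state up to 36 times, 20 seconds apart.
    wait_cluster_consistency() {
        local cluster_name=$1 cluster_size=$2
        local proxy_size=${3:-$cluster_size}     # assumption: default inferred from the '-z' test in the trace
        local i=0 max=36
        sleep 7
        until [[ $(kubectl_bin get pxc "$cluster_name" -o 'jsonpath={.status.state}') == "ready" ]]; do
            echo 'waiting for cluster readyness'
            sleep 20
            [[ $i -ge $max ]] && return 1        # assumption: the timeout branch is never reached in this trace
            let i+=1
        done
        # once ready, compare the pod counts reported in the status
        [[ $(kubectl_bin get pxc "$cluster_name" -o 'jsonpath={.status.pxc.ready}') == "$cluster_size" ]]
        # the proxy count check reads .status.proxysql.ready or .status.haproxy.ready,
        # depending on which of .spec.{proxysql,haproxy}.enabled is true (see get_proxy in the trace)
    }

Retrying each kubectl call up to three times absorbs transient API-server errors, which is why nearly every command in this log carries the same temp-file scaffolding.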
+ return + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zIwoxgarVn +++ mktemp ++ local LAST_ERR=/tmp/tmp.LfiN8jhLRT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zIwoxgarVn ++ cat /tmp/tmp.LfiN8jhLRT ++ rm /tmp/tmp.zIwoxgarVn /tmp/tmp.LfiN8jhLRT ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SstBAqYxhq +++ mktemp ++ local LAST_ERR=/tmp/tmp.9bdyR2P7hw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SstBAqYxhq ++ cat /tmp/tmp.9bdyR2P7hw ++ rm /tmp/tmp.SstBAqYxhq /tmp/tmp.9bdyR2P7hw ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7wWvjj287G +++ mktemp ++ local LAST_ERR=/tmp/tmp.8Rbwrm4FiU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7wWvjj287G ++ cat /tmp/tmp.8Rbwrm4FiU ++ rm /tmp/tmp.7wWvjj287G /tmp/tmp.8Rbwrm4FiU ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JLAIs9Gyyz +++ mktemp ++ local LAST_ERR=/tmp/tmp.XOwdQhNBcH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JLAIs9Gyyz ++ cat /tmp/tmp.XOwdQhNBcH ++ rm /tmp/tmp.JLAIs9Gyyz /tmp/tmp.XOwdQhNBcH ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZI7sUZGb7g +++ mktemp ++ local LAST_ERR=/tmp/tmp.AmX6isQwWn ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZI7sUZGb7g ++ cat /tmp/tmp.AmX6isQwWn ++ rm /tmp/tmp.ZI7sUZGb7g /tmp/tmp.AmX6isQwWn ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.AocS5ntkv9 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.8MHbve07tq +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i 
in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.AocS5ntkv9 +++++ cat /tmp/tmp.8MHbve07tq +++++ rm /tmp/tmp.AocS5ntkv9 /tmp/tmp.8MHbve07tq +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.CN5jwJuUCo ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.GUHhv5y6ei +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.CN5jwJuUCo +++++ cat /tmp/tmp.GUHhv5y6ei +++++ rm /tmp/tmp.CN5jwJuUCo /tmp/tmp.GUHhv5y6ei +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.5ys2M3xhVi +++ mktemp ++ local LAST_ERR=/tmp/tmp.05p1SdjKEz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.5ys2M3xhVi ++ cat /tmp/tmp.05p1SdjKEz ++ rm /tmp/tmp.5ys2M3xhVi /tmp/tmp.05p1SdjKEz ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -umonitor -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -umonitor -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.jlFFUAH7fz +++ mktemp ++ local LAST_ERR=/tmp/tmp.kaRpBFXRwE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.jlFFUAH7fz ++ cat /tmp/tmp.kaRpBFXRwE ++ rm /tmp/tmp.jlFFUAH7fz /tmp/tmp.kaRpBFXRwE ++ return 0 + client_pod=pxc-client-64b479df95-4dt7d + wait_pod pxc-client-64b479df95-4dt7d + local pod=pxc-client-64b479df95-4dt7d + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-4dt7d ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-4dt7d condition met pxc-client-64b479df95-4dt7d.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.wXQqG5rnGC/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4.sql /tmp/tmp.wXQqG5rnGC/select-4.sql + desc 'test operator' + set +o xtrace ----------------------------------------------------------------------------------- test operator ----------------------------------------------------------------------------------- + patch_secret my-cluster-secrets operator dGVzdC1wYXNzd29yZA== + local secret=my-cluster-secrets + local key=operator + local value=dGVzdC1wYXNzd29yZA== + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.k5yQveCGuO ++ mktemp + local LAST_ERR=/tmp/tmp.SBA9gWSPpc + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"operator": "dGVzdC1wYXNzd29yZA=="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.k5yQveCGuO secret/my-cluster-secrets patched + cat /tmp/tmp.SBA9gWSPpc + rm /tmp/tmp.k5yQveCGuO /tmp/tmp.SBA9gWSPpc + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SWIJQTKxzn +++ mktemp ++ local LAST_ERR=/tmp/tmp.wvykB14ZDL ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SWIJQTKxzn ++ cat /tmp/tmp.wvykB14ZDL ++ rm /tmp/tmp.SWIJQTKxzn /tmp/tmp.wvykB14ZDL ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.TdanWAnX99 +++ mktemp ++ local LAST_ERR=/tmp/tmp.D6EmBudXxC ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.TdanWAnX99 ++ cat /tmp/tmp.D6EmBudXxC ++ rm /tmp/tmp.TdanWAnX99 /tmp/tmp.D6EmBudXxC ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vic520z6ai +++ mktemp ++ local LAST_ERR=/tmp/tmp.iw9MGFXNxu ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vic520z6ai ++ cat /tmp/tmp.iw9MGFXNxu ++ rm /tmp/tmp.vic520z6ai /tmp/tmp.iw9MGFXNxu ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.4FxQIBZ0gm ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.dPygq6tdwH +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ 
exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.4FxQIBZ0gm +++++ cat /tmp/tmp.dPygq6tdwH +++++ rm /tmp/tmp.4FxQIBZ0gm /tmp/tmp.dPygq6tdwH +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.e2Ghh2KQc5 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.1pp0GbRZ0Y +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.e2Ghh2KQc5 +++++ cat /tmp/tmp.1pp0GbRZ0Y +++++ rm /tmp/tmp.e2Ghh2KQc5 /tmp/tmp.1pp0GbRZ0Y +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.I9HOrBvipO +++ mktemp ++ local LAST_ERR=/tmp/tmp.mU0pPd38RE ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.I9HOrBvipO ++ cat /tmp/tmp.mU0pPd38RE ++ rm /tmp/tmp.I9HOrBvipO /tmp/tmp.mU0pPd38RE ++ return 0 + [[ 2 == \2 ]] + sleep 10 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hBKtn2NoSe +++ mktemp ++ local LAST_ERR=/tmp/tmp.z3Y5WC4bMR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hBKtn2NoSe ++ cat /tmp/tmp.z3Y5WC4bMR ++ rm /tmp/tmp.hBKtn2NoSe /tmp/tmp.z3Y5WC4bMR ++ return 0 + client_pod=pxc-client-64b479df95-4dt7d + wait_pod pxc-client-64b479df95-4dt7d + local pod=pxc-client-64b479df95-4dt7d + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-4dt7d ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-4dt7d condition met pxc-client-64b479df95-4dt7d.Ok + set +o xtrace + '[' '!' 
-s /tmp/tmp.wXQqG5rnGC/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4.sql /tmp/tmp.wXQqG5rnGC/select-4.sql + desc 'change secret name' + set +o xtrace ----------------------------------------------------------------------------------- change secret name ----------------------------------------------------------------------------------- + kubectl_bin patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' ++ mktemp + local LAST_OUT=/tmp/tmp.2pSYJlk7z7 ++ mktemp + local LAST_ERR=/tmp/tmp.oabgfEGbFW + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch pxc some-name --type merge --patch '{"spec": {"secretsName":"my-cluster-secrets-2"}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.2pSYJlk7z7 perconaxtradbcluster.pxc.percona.com/some-name patched + cat /tmp/tmp.oabgfEGbFW + rm /tmp/tmp.2pSYJlk7z7 /tmp/tmp.oabgfEGbFW + return 0 + sleep 30 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IRX0HiIqt6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.KSXCbFtjjk ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IRX0HiIqt6 ++ cat /tmp/tmp.KSXCbFtjjk ++ rm /tmp/tmp.IRX0HiIqt6 /tmp/tmp.KSXCbFtjjk ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.cg0QelQ0nZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.d0EKgcP59q ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.cg0QelQ0nZ ++ cat /tmp/tmp.d0EKgcP59q ++ rm /tmp/tmp.cg0QelQ0nZ /tmp/tmp.d0EKgcP59q ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.c1sgqAmWGd +++ mktemp ++ local LAST_ERR=/tmp/tmp.JlZDDLvBg6 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.c1sgqAmWGd ++ cat /tmp/tmp.JlZDDLvBg6 ++ rm /tmp/tmp.c1sgqAmWGd /tmp/tmp.JlZDDLvBg6 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zZsT75NIXX +++ mktemp ++ local LAST_ERR=/tmp/tmp.h7lkJ6Xkiz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break 
++ cat /tmp/tmp.zZsT75NIXX ++ cat /tmp/tmp.h7lkJ6Xkiz ++ rm /tmp/tmp.zZsT75NIXX /tmp/tmp.h7lkJ6Xkiz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.2UHNMFekva +++ mktemp ++ local LAST_ERR=/tmp/tmp.8zGQPvQQZq ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.2UHNMFekva ++ cat /tmp/tmp.8zGQPvQQZq ++ rm /tmp/tmp.2UHNMFekva /tmp/tmp.8zGQPvQQZq ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.47TBvxQMjD +++ mktemp ++ local LAST_ERR=/tmp/tmp.7K9O4Yrlq3 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.47TBvxQMjD ++ cat /tmp/tmp.7K9O4Yrlq3 ++ rm /tmp/tmp.47TBvxQMjD /tmp/tmp.7K9O4Yrlq3 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.dmzK7kLHTs +++ mktemp ++ local LAST_ERR=/tmp/tmp.4DN1mXGwed ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.dmzK7kLHTs ++ cat /tmp/tmp.4DN1mXGwed ++ rm /tmp/tmp.dmzK7kLHTs /tmp/tmp.4DN1mXGwed ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Tp3Wv3NULv +++ mktemp ++ local LAST_ERR=/tmp/tmp.sor9ACoCre ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Tp3Wv3NULv ++ cat /tmp/tmp.sor9ACoCre ++ rm /tmp/tmp.Tp3Wv3NULv /tmp/tmp.sor9ACoCre ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.WKlu5kEzi9 ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.yC7hVuGbzq +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.WKlu5kEzi9 +++++ cat /tmp/tmp.yC7hVuGbzq +++++ rm /tmp/tmp.WKlu5kEzi9 /tmp/tmp.yC7hVuGbzq +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.zVkUftIDKO ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.agix2UEgWP +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.zVkUftIDKO +++++ cat 
/tmp/tmp.agix2UEgWP +++++ rm /tmp/tmp.zVkUftIDKO /tmp/tmp.agix2UEgWP +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NueLbrEt8O +++ mktemp ++ local LAST_ERR=/tmp/tmp.XWu2fgRFbd ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NueLbrEt8O ++ cat /tmp/tmp.XWu2fgRFbd ++ rm /tmp/tmp.NueLbrEt8O /tmp/tmp.XWu2fgRFbd ++ return 0 + [[ 2 == \2 ]] + desc 'test new operator' + set +o xtrace ----------------------------------------------------------------------------------- test new operator ----------------------------------------------------------------------------------- + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + patch_secret my-cluster-secrets-2 operator dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets-2 + local key=operator + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.5WcKZaep6G ++ mktemp + local LAST_ERR=/tmp/tmp.6Cfe10gJuP + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets-2 '-p={"data":{"operator": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.5WcKZaep6G secret/my-cluster-secrets-2 patched + cat /tmp/tmp.6Cfe10gJuP + rm /tmp/tmp.5WcKZaep6G /tmp/tmp.6Cfe10gJuP + return 0 + sleep 15 + wait_cluster_consistency some-name 3 2 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=2 + '[' -z 2 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.rwdOtOOTBb +++ mktemp ++ local LAST_ERR=/tmp/tmp.vNVlMUQG82 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.rwdOtOOTBb ++ cat /tmp/tmp.vNVlMUQG82 ++ rm /tmp/tmp.rwdOtOOTBb /tmp/tmp.vNVlMUQG82 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xAW26Li9Tz +++ mktemp ++ local LAST_ERR=/tmp/tmp.MkStayJV4x ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xAW26Li9Tz ++ cat /tmp/tmp.MkStayJV4x ++ rm /tmp/tmp.xAW26Li9Tz /tmp/tmp.MkStayJV4x ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.BrbsahDCra +++ mktemp ++ local LAST_ERR=/tmp/tmp.Xka0TuF2Il ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ 
exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.BrbsahDCra ++ cat /tmp/tmp.Xka0TuF2Il ++ rm /tmp/tmp.BrbsahDCra /tmp/tmp.Xka0TuF2Il ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.RyCNmI7DQv ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.chct0Ma8vY +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.RyCNmI7DQv +++++ cat /tmp/tmp.chct0Ma8vY +++++ rm /tmp/tmp.RyCNmI7DQv /tmp/tmp.chct0Ma8vY +++++ return 0 ++++ [[ '' == \t\r\u\e ]] +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.0uFnCM4bnV ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.OlKs5z5L12 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.proxysql.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.0uFnCM4bnV +++++ cat /tmp/tmp.OlKs5z5L12 +++++ rm /tmp/tmp.0uFnCM4bnV /tmp/tmp.OlKs5z5L12 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-proxysql ++++ return +++ local cluster_proxy=some-name-proxysql +++ echo proxysql ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.proxysql.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.AqddNOO3MT +++ mktemp ++ local LAST_ERR=/tmp/tmp.DW9xLmIUp1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.proxysql.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AqddNOO3MT ++ cat /tmp/tmp.DW9xLmIUp1 ++ rm /tmp/tmp.AqddNOO3MT /tmp/tmp.DW9xLmIUp1 ++ return 0 + [[ 2 == \2 ]] + sleep 20 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.vgs0WXhG68 +++ mktemp ++ local LAST_ERR=/tmp/tmp.jfHwrerGS2 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.vgs0WXhG68 ++ cat /tmp/tmp.jfHwrerGS2 ++ rm /tmp/tmp.vgs0WXhG68 /tmp/tmp.jfHwrerGS2 ++ return 0 + client_pod=pxc-client-64b479df95-4dt7d + wait_pod pxc-client-64b479df95-4dt7d + local pod=pxc-client-64b479df95-4dt7d + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-4dt7d ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-4dt7d condition met 
pxc-client-64b479df95-4dt7d.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.wXQqG5rnGC/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4.sql /tmp/tmp.wXQqG5rnGC/select-4.sql ++ getSecretData my-cluster-secrets-2 root ++ local secretName=my-cluster-secrets-2 ++ local dataKey=root ++ kubectl_bin get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.3IpQx6dehk +++ mktemp ++ local LAST_ERR=/tmp/tmp.4LbelXTtXY ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/my-cluster-secrets-2 '--template={{.data.root}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.3IpQx6dehk ++ cat /tmp/tmp.4LbelXTtXY ++ rm /tmp/tmp.3IpQx6dehk /tmp/tmp.4LbelXTtXY ++ return 0 + newpass='(^P&~@Rjt5K-IvNWS' + desc 'test new users sync' + set +o xtrace ----------------------------------------------------------------------------------- test new users sync ----------------------------------------------------------------------------------- + run_mysql 'CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''(^P&~@Rjt5K-IvNWS'\'';' '-h some-name-pxc -uroot -p'\''(^P&~@Rjt5K-IvNWS'\''' + local 'command=CREATE USER '\''testsync'\''@'\''%'\'' IDENTIFIED BY '\''(^P&~@Rjt5K-IvNWS'\'';' + local 'uri=-h some-name-pxc -uroot -p'\''(^P&~@Rjt5K-IvNWS'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ImAYojgBND +++ mktemp ++ local LAST_ERR=/tmp/tmp.R3KAHcJoHM ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ImAYojgBND ++ cat /tmp/tmp.R3KAHcJoHM ++ rm /tmp/tmp.ImAYojgBND /tmp/tmp.R3KAHcJoHM ++ return 0 + client_pod=pxc-client-64b479df95-4dt7d + wait_pod pxc-client-64b479df95-4dt7d + local pod=pxc-client-64b479df95-4dt7d + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-4dt7d ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-4dt7d condition met pxc-client-64b479df95-4dt7d.Ok + set +o xtrace + sleep 40 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''(^P&~@Rjt5K-IvNWS'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''(^P&~@Rjt5K-IvNWS'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -utestsync -p'\''(^P&~@Rjt5K-IvNWS'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -utestsync -p'\''(^P&~@Rjt5K-IvNWS'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.4RiHLaKTzj +++ mktemp ++ local LAST_ERR=/tmp/tmp.CyqZrg2V5n ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.4RiHLaKTzj ++ cat /tmp/tmp.CyqZrg2V5n ++ rm /tmp/tmp.4RiHLaKTzj /tmp/tmp.CyqZrg2V5n ++ return 0 + 
client_pod=pxc-client-64b479df95-4dt7d + wait_pod pxc-client-64b479df95-4dt7d + local pod=pxc-client-64b479df95-4dt7d + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-4dt7d ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-4dt7d condition met pxc-client-64b479df95-4dt7d.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.wXQqG5rnGC/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4.sql /tmp/tmp.wXQqG5rnGC/select-4.sql ++ getSecretData internal-some-name operator ++ local secretName=internal-some-name ++ local dataKey=operator ++ kubectl_bin get secrets/internal-some-name '--template={{.data.operator}}' ++ base64 --decode +++ mktemp ++ local LAST_OUT=/tmp/tmp.sI4hFIVycz +++ mktemp ++ local LAST_ERR=/tmp/tmp.0hF8R7YCGR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get secrets/internal-some-name '--template={{.data.operator}}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.sI4hFIVycz ++ cat /tmp/tmp.0hF8R7YCGR ++ rm /tmp/tmp.sI4hFIVycz /tmp/tmp.0hF8R7YCGR ++ return 0 + pass=test-password2 + desc 'check secret without operator' + set +o xtrace ----------------------------------------------------------------------------------- check secret without operator ----------------------------------------------------------------------------------- + kubectl_bin apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/conf/secrets.yml ++ mktemp + local LAST_OUT=/tmp/tmp.ocCIGOdlGk ++ mktemp + local LAST_ERR=/tmp/tmp.dQJHO6tbyM + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/conf/secrets.yml + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ocCIGOdlGk secret/my-cluster-secrets-2 configured + cat /tmp/tmp.dQJHO6tbyM Warning: resource secrets/my-cluster-secrets-2 is missing the kubectl.kubernetes.io/last-applied-configuration annotation which is required by kubectl apply. kubectl apply should only be used on resources created declaratively by either kubectl create --save-config or kubectl apply. The missing annotation will be patched automatically. 
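The generated passwords above are read back with getSecretData, which templates a single key out of a Secret and base64-decodes it; internal-some-name is the operator's internal copy of the user credentials, so reading the operator key from it verifies that the rotated password was synced. A minimal sketch, reconstructed from the trace (kubectl_bin is the retry wrapper sketched earlier):

    # Extract and decode one key from a Secret, as seen in the trace.
    getSecretData() {
        local secretName=$1 dataKey=$2
        kubectl_bin get "secrets/${secretName}" "--template={{.data.${dataKey}}}" | base64 --decode
    }
    # Usage matching the trace:
    #   root_pass=$(getSecretData my-cluster-secrets-2 root)   # operator-generated password
    #   pass=$(getSecretData internal-some-name operator)      # -> test-password2

The "missing the kubectl.kubernetes.io/last-applied-configuration annotation" warning above is expected here: my-cluster-secrets-2 was created by the operator when secretsName was switched, not by kubectl apply, so the first apply has to patch the annotation in before it can diff declaratively.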
+ rm /tmp/tmp.ocCIGOdlGk /tmp/tmp.dQJHO6tbyM + return 0 + sleep 15 + compare_mysql_cmd select-4 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local command_id=select-4 + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW TABLES;' '-h some-name-proxysql -uoperator -p'\''test-password2'\''' + local 'command=SHOW TABLES;' + local 'uri=-h some-name-proxysql -uoperator -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.lppVpSb1hr +++ mktemp ++ local LAST_ERR=/tmp/tmp.3D92ZgZdqU ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.lppVpSb1hr ++ cat /tmp/tmp.3D92ZgZdqU ++ rm /tmp/tmp.lppVpSb1hr /tmp/tmp.3D92ZgZdqU ++ return 0 + client_pod=pxc-client-64b479df95-4dt7d + wait_pod pxc-client-64b479df95-4dt7d + local pod=pxc-client-64b479df95-4dt7d + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-4dt7d ++ /usr/bin/sed -E 's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-4dt7d condition met pxc-client-64b479df95-4dt7d.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.wXQqG5rnGC/select-4.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-4.sql /tmp/tmp.wXQqG5rnGC/select-4.sql + newpass=test-password2 ++ echo -n test-password2 ++ base64 + newpassencrypted=dGVzdC1wYXNzd29yZDI= + apply_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/conf/some-name.yml + '[' -z '' ']' + cat_config /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/conf/some-name.yml + kubectl_bin apply -f - + cat /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/conf/some-name.yml ++ mktemp + /usr/bin/sed -e 's#apiVersion: pxc.percona.com/v.*$#apiVersion: pxc.percona.com/v1#' + /usr/bin/sed -e 's#image:.*-pxc\([0-9]*.[0-9]*\)\{0,1\}$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + local LAST_OUT=/tmp/tmp.lPlZEFB8Q4 + /usr/bin/sed -e 's#image:.*\/percona-xtradb-cluster:.*$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7#' + /usr/bin/sed -e 's#image:.*-init$#image: perconalab/percona-xtradb-cluster-operator:PR-1730-852bae96#' + /usr/bin/sed -e 's#image:.*-backup$#image: perconalab/percona-xtradb-cluster-operator:main-pxc5.7-backup#' + /usr/bin/sed -e 's#image:.*-proxysql$#image: perconalab/percona-xtradb-cluster-operator:main-proxysql#' + /usr/bin/sed -e 's#image:.*-logcollector$#image: perconalab/percona-xtradb-cluster-operator:main-logcollector#' ++ mktemp + /usr/bin/sed -e s~minio-service.#namespace~minio-service.users-14491~ + /usr/bin/sed -e 's#image:.*-pmm$#image: perconalab/pmm-client:dev-latest#' + /usr/bin/sed -e 's#apply:.*#apply: Never#' + /usr/bin/sed -e 's#image:.*-haproxy$#image: perconalab/percona-xtradb-cluster-operator:main-haproxy#' + local LAST_ERR=/tmp/tmp.Flb1GVlQUs + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl apply -f - + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.lPlZEFB8Q4 
perconaxtradbcluster.pxc.percona.com/some-name configured + cat /tmp/tmp.Flb1GVlQUs + rm /tmp/tmp.lPlZEFB8Q4 /tmp/tmp.Flb1GVlQUs + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.zg1bQ5sR0p +++ mktemp ++ local LAST_ERR=/tmp/tmp.C9EOS5b3Fe ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.zg1bQ5sR0p ++ cat /tmp/tmp.C9EOS5b3Fe ++ rm /tmp/tmp.zg1bQ5sR0p /tmp/tmp.C9EOS5b3Fe ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.LtKOwHeIz6 +++ mktemp ++ local LAST_ERR=/tmp/tmp.MhfpQ1Q7YH ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.LtKOwHeIz6 ++ cat /tmp/tmp.MhfpQ1Q7YH ++ rm /tmp/tmp.LtKOwHeIz6 /tmp/tmp.MhfpQ1Q7YH ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.f0TaTsQHKb +++ mktemp ++ local LAST_ERR=/tmp/tmp.Ze4nQZVpKR ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.f0TaTsQHKb ++ cat /tmp/tmp.Ze4nQZVpKR ++ rm /tmp/tmp.f0TaTsQHKb /tmp/tmp.Ze4nQZVpKR ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.uewQQiWTe0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.ddWr2CIIAz ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.uewQQiWTe0 ++ cat /tmp/tmp.ddWr2CIIAz ++ rm /tmp/tmp.uewQQiWTe0 /tmp/tmp.ddWr2CIIAz ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 3 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.7X85sO9FsA +++ mktemp ++ local LAST_ERR=/tmp/tmp.DFuDYhRCLv ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.7X85sO9FsA ++ cat /tmp/tmp.DFuDYhRCLv ++ rm /tmp/tmp.7X85sO9FsA /tmp/tmp.DFuDYhRCLv ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 4 -ge 36 
]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.IpcIZQfzYz +++ mktemp ++ local LAST_ERR=/tmp/tmp.mrPqvZjjev ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.IpcIZQfzYz ++ cat /tmp/tmp.mrPqvZjjev ++ rm /tmp/tmp.IpcIZQfzYz /tmp/tmp.mrPqvZjjev ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 5 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.atX3NaJISJ +++ mktemp ++ local LAST_ERR=/tmp/tmp.PITQYfczz4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.atX3NaJISJ ++ cat /tmp/tmp.PITQYfczz4 ++ rm /tmp/tmp.atX3NaJISJ /tmp/tmp.PITQYfczz4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 6 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.JE4vyrt9Ho +++ mktemp ++ local LAST_ERR=/tmp/tmp.pp4J415Nx4 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.JE4vyrt9Ho ++ cat /tmp/tmp.pp4J415Nx4 ++ rm /tmp/tmp.JE4vyrt9Ho /tmp/tmp.pp4J415Nx4 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 7 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.hz11lNpdu5 +++ mktemp ++ local LAST_ERR=/tmp/tmp.h7StSttEeJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.hz11lNpdu5 ++ cat /tmp/tmp.h7StSttEeJ ++ rm /tmp/tmp.hz11lNpdu5 /tmp/tmp.h7StSttEeJ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 8 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ZGEZFMOQpj +++ mktemp ++ local LAST_ERR=/tmp/tmp.821o6RRPir ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ZGEZFMOQpj ++ cat /tmp/tmp.821o6RRPir ++ rm /tmp/tmp.ZGEZFMOQpj /tmp/tmp.821o6RRPir ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 9 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.74kJYrQxP0 +++ mktemp ++ local LAST_ERR=/tmp/tmp.eeO7uFYNuF ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.74kJYrQxP0 ++ cat /tmp/tmp.eeO7uFYNuF ++ rm /tmp/tmp.74kJYrQxP0 /tmp/tmp.eeO7uFYNuF ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' 
waiting for cluster readyness + sleep 20 + [[ 10 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.ukwQdTbaIc +++ mktemp ++ local LAST_ERR=/tmp/tmp.cbg4Q12Z6H ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.ukwQdTbaIc ++ cat /tmp/tmp.cbg4Q12Z6H ++ rm /tmp/tmp.ukwQdTbaIc /tmp/tmp.cbg4Q12Z6H ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 11 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.1EWb3G5zqT +++ mktemp ++ local LAST_ERR=/tmp/tmp.skv5xPp5dc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.1EWb3G5zqT ++ cat /tmp/tmp.skv5xPp5dc ++ rm /tmp/tmp.1EWb3G5zqT /tmp/tmp.skv5xPp5dc ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 12 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EAnvYTRzHp +++ mktemp ++ local LAST_ERR=/tmp/tmp.wMh6Hg8KVJ ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EAnvYTRzHp ++ cat /tmp/tmp.wMh6Hg8KVJ ++ rm /tmp/tmp.EAnvYTRzHp /tmp/tmp.wMh6Hg8KVJ ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 13 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.NIGOv1yLH3 +++ mktemp ++ local LAST_ERR=/tmp/tmp.lAQNofPj9x ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.NIGOv1yLH3 ++ cat /tmp/tmp.lAQNofPj9x ++ rm /tmp/tmp.NIGOv1yLH3 /tmp/tmp.lAQNofPj9x ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.RLjKGgmjkd +++ mktemp ++ local LAST_ERR=/tmp/tmp.5s8WT1KzAB ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.RLjKGgmjkd ++ cat /tmp/tmp.5s8WT1KzAB ++ rm /tmp/tmp.RLjKGgmjkd /tmp/tmp.5s8WT1KzAB ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.11eVEDH7OE ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.7QTCoSb8t5 +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.11eVEDH7OE +++++ cat /tmp/tmp.7QTCoSb8t5 +++++ rm /tmp/tmp.11eVEDH7OE /tmp/tmp.7QTCoSb8t5 +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy 
++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.0xqvTdKdTd +++ mktemp ++ local LAST_ERR=/tmp/tmp.8aCasfu863 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.0xqvTdKdTd ++ cat /tmp/tmp.8aCasfu863 ++ rm /tmp/tmp.0xqvTdKdTd /tmp/tmp.8aCasfu863 ++ return 0 + [[ 3 == \3 ]] + patch_secret my-cluster-secrets monitor dGVzdC1wYXNzd29yZDI= + local secret=my-cluster-secrets + local key=monitor + local value=dGVzdC1wYXNzd29yZDI= + kubectl_bin patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' ++ mktemp + local LAST_OUT=/tmp/tmp.ktf9MZY7bB ++ mktemp + local LAST_ERR=/tmp/tmp.kaM3RqHQLZ + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl patch secret my-cluster-secrets '-p={"data":{"monitor": "dGVzdC1wYXNzd29yZDI="}}' + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.ktf9MZY7bB secret/my-cluster-secrets patched + cat /tmp/tmp.kaM3RqHQLZ + rm /tmp/tmp.ktf9MZY7bB /tmp/tmp.kaM3RqHQLZ + return 0 + sleep 15 + wait_cluster_consistency some-name 3 3 + local cluster_name=some-name + local cluster_size=3 + local proxy_size=3 + '[' -z 3 ']' + desc 'wait cluster consistency' + set +o xtrace ----------------------------------------------------------------------------------- wait cluster consistency ----------------------------------------------------------------------------------- + local i=0 + local max=36 + sleep 7 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.SIiRVxBPyQ +++ mktemp ++ local LAST_ERR=/tmp/tmp.Jl1baG6sVc ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.SIiRVxBPyQ ++ cat /tmp/tmp.Jl1baG6sVc ++ rm /tmp/tmp.SIiRVxBPyQ /tmp/tmp.Jl1baG6sVc ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 0 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.EymTcLQujH +++ mktemp ++ local LAST_ERR=/tmp/tmp.TaLJeA7tR1 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.EymTcLQujH ++ cat /tmp/tmp.TaLJeA7tR1 ++ rm /tmp/tmp.EymTcLQujH /tmp/tmp.TaLJeA7tR1 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 1 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.Rv57szcMCO +++ mktemp ++ local LAST_ERR=/tmp/tmp.HBebRsLCB0 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.Rv57szcMCO ++ cat /tmp/tmp.HBebRsLCB0 ++ rm /tmp/tmp.Rv57szcMCO /tmp/tmp.HBebRsLCB0 ++ return 0 + [[ initializing == \r\e\a\d\y ]] + echo 'waiting for cluster readyness' waiting for cluster readyness + sleep 20 + [[ 2 -ge 36 ]] + let i+=1 ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.state}' +++ mktemp 
++ local LAST_OUT=/tmp/tmp.xccFmTXLdZ +++ mktemp ++ local LAST_ERR=/tmp/tmp.NrBc4dmfyI ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.state}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xccFmTXLdZ ++ cat /tmp/tmp.NrBc4dmfyI ++ rm /tmp/tmp.xccFmTXLdZ /tmp/tmp.NrBc4dmfyI ++ return 0 + [[ ready == \r\e\a\d\y ]] ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.pxc.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.8UtCWebcEH +++ mktemp ++ local LAST_ERR=/tmp/tmp.pDAkPdL6x7 ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.pxc.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.8UtCWebcEH ++ cat /tmp/tmp.pDAkPdL6x7 ++ rm /tmp/tmp.8UtCWebcEH /tmp/tmp.pDAkPdL6x7 ++ return 0 + [[ 3 == \3 ]] +++ get_proxy_engine some-name +++ local cluster_name=some-name ++++ get_proxy some-name ++++ local target_cluster=some-name +++++ kubectl_bin get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' ++++++ mktemp +++++ local LAST_OUT=/tmp/tmp.1daOQChUiS ++++++ mktemp +++++ local LAST_ERR=/tmp/tmp.hp2l5GgXry +++++ local exit_status=0 ++++++ seq 0 2 +++++ for i in '$(seq 0 2)' +++++ set +e +++++ kubectl get pxc some-name -o 'jsonpath={.spec.haproxy.enabled}' +++++ exit_status=0 +++++ set -e +++++ '[' 0 '!=' 0 ']' +++++ break +++++ cat /tmp/tmp.1daOQChUiS +++++ cat /tmp/tmp.hp2l5GgXry +++++ rm /tmp/tmp.1daOQChUiS /tmp/tmp.hp2l5GgXry +++++ return 0 ++++ [[ true == \t\r\u\e ]] ++++ echo some-name-haproxy ++++ return +++ local cluster_proxy=some-name-haproxy +++ echo haproxy ++ kubectl_bin get pxc some-name -o 'jsonpath={.status.haproxy.ready}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.xK6Qlb5g7z +++ mktemp ++ local LAST_ERR=/tmp/tmp.EsYUfiCwxa ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pxc some-name -o 'jsonpath={.status.haproxy.ready}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.xK6Qlb5g7z ++ cat /tmp/tmp.EsYUfiCwxa ++ rm /tmp/tmp.xK6Qlb5g7z /tmp/tmp.EsYUfiCwxa ++ return 0 + [[ 3 == \3 ]] + compare_mysql_cmd select-3 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local command_id=select-3 + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local postfix= + local expected_result=/mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-3.sql + [[ perconalab/percona-xtradb-cluster-operator:main-pxc5.7 =~ 8\.0 ]] + run_mysql 'SHOW DATABASES;' '-h some-name-haproxy -umonitor -p'\''test-password2'\''' + local 'command=SHOW DATABASES;' + local 'uri=-h some-name-haproxy -umonitor -p'\''test-password2'\''' ++ get_client_pod ++ kubectl_bin get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' +++ mktemp ++ local LAST_OUT=/tmp/tmp.t27dzxNMPN +++ mktemp ++ local LAST_ERR=/tmp/tmp.lkuU2Si2Pw ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=name=pxc-client -o 'jsonpath={.items[].metadata.name}' ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.t27dzxNMPN ++ cat /tmp/tmp.lkuU2Si2Pw ++ rm /tmp/tmp.t27dzxNMPN /tmp/tmp.lkuU2Si2Pw ++ return 0 + client_pod=pxc-client-64b479df95-4dt7d + wait_pod pxc-client-64b479df95-4dt7d + local pod=pxc-client-64b479df95-4dt7d + local max_retry=480 + local ns= ++ echo pxc-client-64b479df95-4dt7d ++ /usr/bin/sed -E 
's/.*-(pxc|proxysql)-[0-9]/\1/' ++ egrep '^(pxc|proxysql)$' + local container= + set +o xtrace pod/pxc-client-64b479df95-4dt7d condition met pxc-client-64b479df95-4dt7d.Ok + set +o xtrace + '[' '!' -s /tmp/tmp.wXQqG5rnGC/select-3.sql ']' + diff -u /mnt/jenkins/workspace/cloud-pxc-operator_PR-1730/e2e-tests/users/compare/select-3.sql /tmp/tmp.wXQqG5rnGC/select-3.sql + destroy users-14491 + local namespace=users-14491 + local ignore_logs=true + desc 'destroy cluster/operator and all other resources' + set +o xtrace ----------------------------------------------------------------------------------- destroy cluster/operator and all other resources ----------------------------------------------------------------------------------- + '[' true == false -o 1 == 1 ']' + grep -v level=info ++ get_operator_pod + grep -v 'the object has been modified' ++ local label_prefix=app.kubernetes.io/ + /usr/bin/sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' + tee /tmp/tmp.wXQqG5rnGC/operator.log + sort -u +++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -n pxc-operator + grep -v 'get backup status: Job.batch' +++ grep -c percona-xtradb-cluster-operator ++ local check_label=1 ++ [[ 1 -eq 0 ]] ++ kubectl_bin get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator +++ mktemp ++ local LAST_OUT=/tmp/tmp.AW0geXsbLH +++ mktemp ++ local LAST_ERR=/tmp/tmp.c17xFkATZT ++ local exit_status=0 +++ seq 0 2 ++ for i in '$(seq 0 2)' ++ set +e ++ kubectl get pods --selector=app.kubernetes.io/name=percona-xtradb-cluster-operator -o 'jsonpath={.items[].metadata.name}' -n pxc-operator ++ exit_status=0 ++ set -e ++ '[' 0 '!=' 0 ']' ++ break ++ cat /tmp/tmp.AW0geXsbLH ++ cat /tmp/tmp.c17xFkATZT ++ rm /tmp/tmp.AW0geXsbLH /tmp/tmp.c17xFkATZT ++ return 0 + kubectl_bin logs -n pxc-operator percona-xtradb-cluster-operator-784d88b77-d7957 ++ mktemp + local LAST_OUT=/tmp/tmp.tijYYhnHGD ++ mktemp + local LAST_ERR=/tmp/tmp.vUrzIVic1a + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl logs -n pxc-operator percona-xtradb-cluster-operator-784d88b77-d7957 + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.tijYYhnHGD + cat /tmp/tmp.vUrzIVic1a + rm /tmp/tmp.tijYYhnHGD /tmp/tmp.vUrzIVic1a + return 0 2024-06-13T04:03:37.159Z INFO setup Manager starting up {"gitCommit": "852bae96aa0d82139a70b7c3d88a51521401373a", "gitBranch": "PR-1730-852bae96", "buildTime": "2024-06-13T02:01:34Z", "goVersion": "go1.22.4", "os": "linux", "arch": "amd64"} 2024-06-13T04:03:37.159Z INFO setup Runs on {"platform": "kubernetes", "version": "v1.26.15-gke.1390000"} 2024-06-13T04:03:37.162Z INFO setup Registering Components. 2024-06-13T04:03:40.418Z INFO controller-runtime.webhook Registering webhook {"path": "/validate-percona-xtradbcluster"} 2024-06-13T04:03:40.421Z INFO controller-runtime.metrics Starting metrics server 2024-06-13T04:03:40.421Z INFO setup Starting the Cmd. 
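Note on the operator log dump around this point: destroy() does not print the raw log. The trace above shows kubectl logs being piped through several filters before tee writes /tmp/tmp.wXQqG5rnGC/operator.log; the following reconstructs that pipeline (the relative order of the stages is inferred, since the trace only shows each command starting):

    kubectl logs -n pxc-operator percona-xtradb-cluster-operator-784d88b77-d7957 \
        | grep -v level=info \
        | grep -v 'the object has been modified' \
        | grep -v 'get backup status: Job.batch' \
        | sed -r 's/"ts":[0-9.]+//; s^limits-[0-9.]+/^^g' \
        | sort -u \
        | tee /tmp/tmp.wXQqG5rnGC/operator.log

Because every operator entry begins with an ISO-8601 timestamp, sort -u deduplicates while keeping timestamped lines in chronological order; lines without a timestamp (the Go stack frames and [mysql] driver messages near the end of the dump) are sorted out of their emission order, which is why that tail reads jumbled.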
2024-06-13T04:03:40.421Z INFO starting server {"name": "health probe", "addr": "[::]:8081"} 2024-06-13T04:03:40.422Z INFO controller-runtime.certwatcher Starting certificate watcher 2024-06-13T04:03:40.422Z INFO controller-runtime.certwatcher Updated current TLS certificate 2024-06-13T04:03:40.422Z INFO controller-runtime.metrics Serving metrics server {"bindAddress": ":8080", "secure": false} 2024-06-13T04:03:40.422Z INFO controller-runtime.webhook Serving webhook server {"host": "", "port": 9443} 2024-06-13T04:03:40.422Z INFO controller-runtime.webhook Starting webhook server 2024-06-13T04:03:40.523Z INFO attempting to acquire leader lease pxc-operator/08db1feb.percona.com... 2024-06-13T04:03:40.546Z DEBUG events percona-xtradb-cluster-operator-784d88b77-d7957_be994733-ce93-4ae8-9fa5-3b0334f7af10 became leader {"type": "Normal", "object": {"kind":"Lease","namespace":"pxc-operator","name":"08db1feb.percona.com","uid":"c6cd0d92-ef3c-4c9a-b28d-3b7725f22774","apiVersion":"coordination.k8s.io/v1","resourceVersion":"67070"}, "reason": "LeaderElection"} 2024-06-13T04:03:40.546Z INFO Starting Controller {"controller": "pxcbackup-controller"} 2024-06-13T04:03:40.546Z INFO Starting Controller {"controller": "pxc-controller"} 2024-06-13T04:03:40.546Z INFO Starting Controller {"controller": "pxcrestore-controller"} 2024-06-13T04:03:40.546Z INFO Starting EventSource {"controller": "pxcbackup-controller", "source": "kind source: unknown type"} 2024-06-13T04:03:40.546Z INFO Starting EventSource {"controller": "pxc-controller", "source": "kind source: unknown type"} 2024-06-13T04:03:40.546Z INFO Starting EventSource {"controller": "pxcrestore-controller", "source": "kind source: unknown type"} 2024-06-13T04:03:40.546Z INFO successfully acquired lease pxc-operator/08db1feb.percona.com 2024-06-13T04:03:40.652Z INFO Starting workers {"controller": "pxcbackup-controller", "worker count": 1} 2024-06-13T04:03:40.652Z INFO Starting workers {"controller": "pxc-controller", "worker count": 1} 2024-06-13T04:03:40.658Z INFO Starting workers {"controller": "pxcrestore-controller", "worker count": 1} 2024-06-13T04:04:11.956Z INFO Set CR version {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "ee2a5814-2185-47ae-affa-c6e49336d55d", "version": "1.15.0"} 2024-06-13T04:05:31.371Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "d62b82bd-0e80-4a87-ad3c-d1c4b5283b21", "user": "operator"} 2024-06-13T04:05:31.407Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "d62b82bd-0e80-4a87-ad3c-d1c4b5283b21", "user": "monitor"} 2024-06-13T04:05:31.456Z INFO User monitor: granted privileges {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "d62b82bd-0e80-4a87-ad3c-d1c4b5283b21"} 2024-06-13T04:05:31.498Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "d62b82bd-0e80-4a87-ad3c-d1c4b5283b21", "user": "xtrabackup"} 2024-06-13T04:05:31.543Z INFO User xtrabackup: granted privileges {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "d62b82bd-0e80-4a87-ad3c-d1c4b5283b21"} 2024-06-13T04:05:31.653Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": 
"d62b82bd-0e80-4a87-ad3c-d1c4b5283b21", "err": "get primary pxc pod: not found"} 2024-06-13T04:05:36.451Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "01fce288-83c5-4c67-8912-69b819a7cfa4", "err": "get primary pxc pod: not found"} 2024-06-13T04:05:41.735Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "7ff40672-4680-4320-b108-5e487e0b0bde", "err": "get primary pxc pod: not found"} 2024-06-13T04:05:46.988Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "29bd4d1c-762c-4c78-942f-f85c1b9f7a0e", "err": "get primary pxc pod: not found"} 2024-06-13T04:07:54.253Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c3cca27a-3d64-44f8-8c76-abfb80575655", "user": "root"} 2024-06-13T04:07:54.302Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c3cca27a-3d64-44f8-8c76-abfb80575655", "user": "replication"} 2024-06-13T04:07:54.532Z INFO update PXC version (fetched from db) {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c3cca27a-3d64-44f8-8c76-abfb80575655", "new version": "5.7.44-48-57"} 2024-06-13T04:07:57.804Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c3cca27a-3d64-44f8-8c76-abfb80575655"} 2024-06-13T04:08:02.520Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "85fa276a-d176-4022-b3e9-05c94ff90b55"} 2024-06-13T04:08:07.773Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "a80d8760-341c-4c45-aaf8-9af6d6b58af6"} 2024-06-13T04:08:13.059Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "689f0abf-7e86-4827-8b85-5ffd1aff7958"} 2024-06-13T04:08:18.434Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "29dc1b5d-90f6-4f9b-9eab-f1294a303050"} 2024-06-13T04:08:23.730Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "b43bd21a-eb86-4a80-9a48-5b334019204b"} 2024-06-13T04:08:28.925Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "3d4a3e28-ae44-4d3d-ad72-fca27cd5598c"} 2024-06-13T04:08:34.156Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "9286ddb4-717f-49ea-ab95-9863f96cc483"} 2024-06-13T04:08:39.735Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "5eb696ba-5e91-4d19-89f3-34c55a152254"} 2024-06-13T04:08:44.911Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "85f4f8bd-1480-4480-ae53-316076f40e8c"} 2024-06-13T04:08:49.921Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", 
"namespace": "users-14491", "name": "some-name", "reconcileID": "58ba9e31-c36a-4534-89d4-d535f0e4bc52"} 2024-06-13T04:08:55.156Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "9caefaff-7355-44cb-a0d9-5324e9581f8a"} 2024-06-13T04:09:00.530Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "994c4224-2c9f-404a-9889-f98531dac702"} 2024-06-13T04:09:05.719Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "bb45d39a-b0d6-4765-ae24-a7d89a5021ae"} 2024-06-13T04:09:08.322Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "56deefd9-7006-4215-b5c3-6653b0e138cc", "user": "root"} 2024-06-13T04:09:08.361Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "56deefd9-7006-4215-b5c3-6653b0e138cc", "user": "root"} 2024-06-13T04:09:08.396Z INFO MySQL init secret created {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "56deefd9-7006-4215-b5c3-6653b0e138cc", "secret": "some-name-mysql-init", "user": "root"} 2024-06-13T04:09:14.246Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "56deefd9-7006-4215-b5c3-6653b0e138cc"} 2024-06-13T04:09:14.280Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "56deefd9-7006-4215-b5c3-6653b0e138cc", "user": "root"} 2024-06-13T04:09:17.635Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "56deefd9-7006-4215-b5c3-6653b0e138cc"} 2024-06-13T04:09:22.936Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "5007fe83-8fcb-4ad8-806c-396befdc6c2a"} 2024-06-13T04:09:28.336Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "5762d53b-30da-4b43-b61a-2e4980dd68d5"} 2024-06-13T04:09:45.968Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c59e1f7f-a30e-4bab-9fab-c55b8ec7113d", "err": "get primary pxc pod: not found"} 2024-06-13T04:09:46.214Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "131ba32c-91bc-439f-8b03-a80eea4d301b", "err": "get primary pxc pod: not found"} 2024-06-13T04:09:50.227Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "131ba32c-91bc-439f-8b03-a80eea4d301b", "error": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in ProxySQL\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR (line:512) : The cluster (with writer hostgroup:11) has not been configured in 
ProxySQL\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-13T04:09:51.109Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "6f09956f-116c-45a0-881f-6fa3a500c3b2", "user": "proxyadmin"} 2024-06-13T04:09:51.109Z INFO Password expiration policy updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "6f09956f-116c-45a0-881f-6fa3a500c3b2", "user": "proxyadmin"} 2024-06-13T04:09:51.183Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "6f09956f-116c-45a0-881f-6fa3a500c3b2", "user": "proxyadmin"} 2024-06-13T04:09:51.194Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "6f09956f-116c-45a0-881f-6fa3a500c3b2", "user": "proxyadmin"} 2024-06-13T04:09:51.194Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "6f09956f-116c-45a0-881f-6fa3a500c3b2", "last-applied-secret": "1a55867d0f4fa0d0643653b2eb57b2f690315d8d843a10599c9839c2a779a95f"} 2024-06-13T04:09:51.452Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "6f09956f-116c-45a0-881f-6fa3a500c3b2", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-13T04:10:18.344Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "798b389b-2eca-487a-8b82-b4a336410ab7", "err": "get primary pxc pod: not found"} 2024-06-13T04:10:23.576Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "a422e862-da42-4fe3-83d1-b061d0f57e5e", "err": "get primary pxc pod: not found"} 2024-06-13T04:10:50.238Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "e1a97c53-8469-4936-a957-0e4c99a222f0"} 2024-06-13T04:11:00.330Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "a124e874-f727-40ad-943c-168b3954947f"} 2024-06-13T04:11:04.267Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "ea699fd6-edb3-4ffc-87ae-77f8cdf82f9b", "error": "exec syncusers: command terminated with exit code 137 / / ", "errorVerbose": "exec syncusers: command terminated with exit code 137 / / \ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-13T04:11:10.462Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "54c7fcd6-a9eb-417b-979c-68987e62d401", "user": "xtrabackup"} 2024-06-13T04:11:10.490Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "54c7fcd6-a9eb-417b-979c-68987e62d401", "user": "xtrabackup"} 2024-06-13T04:11:10.499Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "54c7fcd6-a9eb-417b-979c-68987e62d401", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-13T04:11:10.516Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "54c7fcd6-a9eb-417b-979c-68987e62d401", "user": "xtrabackup"} 2024-06-13T04:11:10.516Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "54c7fcd6-a9eb-417b-979c-68987e62d401", "last-applied-secret": "d11bcc70ca53ea410cd455e13d82e7225917b0861d9b677ed34c02c5f6fbaf1b"} 
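The five entries above form one complete rotation for the xtrabackup user: password changed, applied in MySQL, mirrored into the some-name-mysql-init secret and the operator's internal secret, then a restart scheduled under the new last-applied-secret hash. One way to pull that sequence for a single user out of a live operator log (deployment name inferred from the pod name above; the restart entry carries only the hash, not the user, so it is matched separately):

    kubectl logs -n pxc-operator deploy/percona-xtradb-cluster-operator --tail=-1 \
        | grep -E '"user": "xtrabackup"|pods will be restarted'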
2024-06-13T04:11:16.279Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "54c7fcd6-a9eb-417b-979c-68987e62d401"} 2024-06-13T04:13:11.129Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "2b72605e-ed27-4c85-a6d0-5daf739c12e5", "primary name": "some-name-pxc-0.some-name-pxc.users-14491.svc.cluster.local"} 2024-06-13T04:13:57.621Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "836c0bfe-0d8e-4d85-b1ed-f96324af6f78"} 2024-06-13T04:14:02.333Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "700826f6-4a8e-4f7d-b6b3-70b4fffedcf8"} 2024-06-13T04:14:07.413Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "e9e01450-5c93-4bf6-a4fb-aa56501f1dfa"} 2024-06-13T04:14:12.744Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "f21dd403-8fdb-4a31-8bae-cf772d5a4de7"} 2024-06-13T04:14:18.041Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "0dcecf12-3555-4c20-8369-df8dd88393c7"} 2024-06-13T04:14:19.981Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "0b2a2d97-e1f2-4d63-bab5-50ffbf2c67d5", "user": "monitor"} 2024-06-13T04:14:20.007Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "0b2a2d97-e1f2-4d63-bab5-50ffbf2c67d5", "user": "monitor"} 2024-06-13T04:14:20.015Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "0b2a2d97-e1f2-4d63-bab5-50ffbf2c67d5", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-13T04:14:20.061Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "0b2a2d97-e1f2-4d63-bab5-50ffbf2c67d5", "user": "monitor"} 2024-06-13T04:14:20.071Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "0b2a2d97-e1f2-4d63-bab5-50ffbf2c67d5", "user": "monitor"} 2024-06-13T04:14:20.071Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "0b2a2d97-e1f2-4d63-bab5-50ffbf2c67d5", "last-applied-secret": "4e3282050745046d5044e0fcb5f2bf140d7320a1256a79c8ca2184f38f4f3098"} 2024-06-13T04:14:23.206Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "0b2a2d97-e1f2-4d63-bab5-50ffbf2c67d5", "error": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / ", "errorVerbose": "exec syncusers: unable to upgrade connection: container not found (\"proxysql\") / / 
\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-13T04:15:14.350Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "5c8035ac-0dba-4132-bd4c-8abeb9ea9433"} 2024-06-13T04:15:19.157Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "a50b834f-cbda-46a0-9999-138370353beb"} 2024-06-13T04:15:24.473Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "7da0beec-8073-499b-989f-99010ecefb20"} 2024-06-13T04:15:29.639Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "f76201fe-8210-4a9f-bd43-6a4cf1476cff"} 2024-06-13T04:15:34.928Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "8a426249-c973-407c-abc9-b87cb6d8346e"} 2024-06-13T04:15:40.357Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "9459c642-e356-4558-a988-d2b353e36f7a"} 2024-06-13T04:15:45.738Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "2035aa5e-dc45-4247-9fae-9724772e3920"} 2024-06-13T04:15:51.044Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "39a26d4e-1aee-4a10-9580-e809f46c2713"} 2024-06-13T04:15:52.847Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "ac0d5de6-a209-4f1e-8e55-7d2affd38ede", "user": "operator"} 2024-06-13T04:15:52.873Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "ac0d5de6-a209-4f1e-8e55-7d2affd38ede", "user": "operator"} 2024-06-13T04:15:52.885Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "ac0d5de6-a209-4f1e-8e55-7d2affd38ede", "secret": "some-name-mysql-init", "user": "operator"} 2024-06-13T04:15:52.899Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "ac0d5de6-a209-4f1e-8e55-7d2affd38ede", "user": "operator"} 2024-06-13T04:15:52.900Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "ac0d5de6-a209-4f1e-8e55-7d2affd38ede", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-06-13T04:15:54.320Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "ac0d5de6-a209-4f1e-8e55-7d2affd38ede", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 
(28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-14491.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-14491.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-14491.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-14491.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-14491.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-14491.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-13T04:16:30.163Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "173cfd09-e977-40ac-8888-ace8b5753328"} 2024-06-13T04:16:37.938Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "f5e1e45b-5bdc-45c8-bc01-4f9bd31af9cc"} 2024-06-13T04:16:43.159Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "f0f4d86f-5fde-4c53-bffe-ef4e5fcf74c7"} 2024-06-13T04:16:48.518Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "898c2ec2-ca48-49b1-b306-1e2a787b3155"} 2024-06-13T04:16:49.775Z INFO Created user secrets {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c1e4e7e4-8b80-425b-973e-ecd73f2212e0", "secrets": "my-cluster-secrets-2"} 2024-06-13T04:16:49.775Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c1e4e7e4-8b80-425b-973e-ecd73f2212e0", "user": "root"} 2024-06-13T04:16:49.814Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c1e4e7e4-8b80-425b-973e-ecd73f2212e0", "user": "root"} 2024-06-13T04:16:49.822Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c1e4e7e4-8b80-425b-973e-ecd73f2212e0", "secret": "some-name-mysql-init", "user": "root"} 2024-06-13T04:16:55.487Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c1e4e7e4-8b80-425b-973e-ecd73f2212e0"} 2024-06-13T04:16:55.501Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": 
"users-14491", "name": "some-name", "reconcileID": "c1e4e7e4-8b80-425b-973e-ecd73f2212e0", "user": "root"} 2024-06-13T04:16:55.501Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c1e4e7e4-8b80-425b-973e-ecd73f2212e0", "user": "operator"} 2024-06-13T04:16:55.531Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c1e4e7e4-8b80-425b-973e-ecd73f2212e0", "user": "operator"} 2024-06-13T04:16:55.541Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c1e4e7e4-8b80-425b-973e-ecd73f2212e0", "secret": "some-name-mysql-init", "user": "operator"} 2024-06-13T04:16:55.553Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c1e4e7e4-8b80-425b-973e-ecd73f2212e0", "user": "operator"} 2024-06-13T04:16:55.553Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c1e4e7e4-8b80-425b-973e-ecd73f2212e0", "user": "monitor"} 2024-06-13T04:16:55.579Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c1e4e7e4-8b80-425b-973e-ecd73f2212e0", "user": "monitor"} 2024-06-13T04:16:55.588Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c1e4e7e4-8b80-425b-973e-ecd73f2212e0", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-13T04:16:55.634Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c1e4e7e4-8b80-425b-973e-ecd73f2212e0", "user": "monitor"} 2024-06-13T04:16:55.643Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c1e4e7e4-8b80-425b-973e-ecd73f2212e0", "user": "monitor"} 2024-06-13T04:16:55.643Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c1e4e7e4-8b80-425b-973e-ecd73f2212e0", "user": "xtrabackup"} 2024-06-13T04:16:55.667Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c1e4e7e4-8b80-425b-973e-ecd73f2212e0", "user": "xtrabackup"} 2024-06-13T04:16:55.676Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c1e4e7e4-8b80-425b-973e-ecd73f2212e0", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-13T04:16:55.685Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c1e4e7e4-8b80-425b-973e-ecd73f2212e0", "user": "xtrabackup"} 2024-06-13T04:16:55.685Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c1e4e7e4-8b80-425b-973e-ecd73f2212e0", "user": "replication"} 2024-06-13T04:16:55.708Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c1e4e7e4-8b80-425b-973e-ecd73f2212e0", "user": "replication"} 2024-06-13T04:16:55.718Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", 
"reconcileID": "c1e4e7e4-8b80-425b-973e-ecd73f2212e0", "secret": "some-name-mysql-init", "user": "replication"} 2024-06-13T04:16:55.726Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c1e4e7e4-8b80-425b-973e-ecd73f2212e0", "user": "replication"} 2024-06-13T04:16:55.726Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c1e4e7e4-8b80-425b-973e-ecd73f2212e0", "user": "proxyadmin"} 2024-06-13T04:16:55.778Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c1e4e7e4-8b80-425b-973e-ecd73f2212e0", "user": "proxyadmin"} 2024-06-13T04:16:55.787Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c1e4e7e4-8b80-425b-973e-ecd73f2212e0", "user": "proxyadmin"} 2024-06-13T04:16:55.787Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c1e4e7e4-8b80-425b-973e-ecd73f2212e0", "last-applied-secret": "7e8110c8a5b28b8ed273ec5f843522ed95d92db2484baf8b35f3296639d500d4"} 2024-06-13T04:16:55.787Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c1e4e7e4-8b80-425b-973e-ecd73f2212e0", "last-applied-secret": "7e8110c8a5b28b8ed273ec5f843522ed95d92db2484baf8b35f3296639d500d4"} 2024-06-13T04:16:56.021Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c1e4e7e4-8b80-425b-973e-ecd73f2212e0", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. 
\n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-13T04:18:32.226Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "bebe4236-063b-45f0-9a11-1eebafbbd6aa", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-14491 on 10.169.64.10:53: no such host"} 2024-06-13T04:18:37.917Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "f7250a3b-6861-4c77-85ab-640109cc2f09", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-14491 on 10.169.64.10:53: no such host"} 2024-06-13T04:18:43.258Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "15ba90ed-b3fa-435d-9990-3a5438acf5ce", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-14491 on 10.169.64.10:53: no such host"} 2024-06-13T04:18:53.753Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "a695913d-0b17-4028-bd97-070059c911f7", "primary name": "some-name-pxc-0.some-name-pxc.users-14491.svc.cluster.local"} 2024-06-13T04:18:59.030Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "2ffdc73a-8852-4763-8f17-f74819a22e95", "primary name": "some-name-pxc-0.some-name-pxc.users-14491.svc.cluster.local"} 2024-06-13T04:19:04.287Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "fb2e5afb-cab4-493f-b725-e61c642c9c61", "primary name": "some-name-pxc-0.some-name-pxc.users-14491.svc.cluster.local"} 2024-06-13T04:19:09.466Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "a0be368f-518a-472a-8144-1754f97b0fe9", "primary name": "some-name-pxc-0.some-name-pxc.users-14491.svc.cluster.local"} 2024-06-13T04:19:14.684Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "57f21e6e-bfc8-4006-b612-4c73a684e498", "primary name": "some-name-pxc-0.some-name-pxc.users-14491.svc.cluster.local"} 2024-06-13T04:19:23.675Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "3f071b26-95c9-4038-9b52-15a1f968885a"} 2024-06-13T04:19:28.764Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "8aba9d5d-1a7f-43ad-8207-7cb7a1070dc5"} 2024-06-13T04:19:34.234Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "2afc128a-3bc7-4fe7-86d1-a7cb8ca52f35"} 2024-06-13T04:19:39.058Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "10c47e7a-629c-442b-a4fb-f601838b6c60"} 2024-06-13T04:19:40.869Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "f28bd07f-f136-4aad-8af0-27a20fdc5be1", "user": "operator"} 2024-06-13T04:19:40.902Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "f28bd07f-f136-4aad-8af0-27a20fdc5be1", "user": "operator"} 2024-06-13T04:19:40.913Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "f28bd07f-f136-4aad-8af0-27a20fdc5be1", "secret": "some-name-mysql-init", "user": "operator"} 2024-06-13T04:19:40.923Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "f28bd07f-f136-4aad-8af0-27a20fdc5be1", "user": "operator"} 2024-06-13T04:19:40.923Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "f28bd07f-f136-4aad-8af0-27a20fdc5be1", "last-applied-secret": "13cc8416be95771821b197f90fb659b41329cba82ecc1784461c30e38719f1aa"} 2024-06-13T04:19:42.343Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "f28bd07f-f136-4aad-8af0-27a20fdc5be1", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-14491.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-14491.svc.' (using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-14491.svc.' (using password: YES)\nERROR 1045 (28000): Access denied for user 'operator'@'some-name-proxysql-0.some-name-proxysql-unready.users-14491.svc.' 
(using password: YES)\nERROR (line:569) : Could not find a primary cluster node\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-13T04:20:20.184Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "752ba58b-1e41-4711-9178-943c98e2e2f5"} 2024-06-13T04:20:28.221Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "ea22ff45-e123-4974-9276-2651f26891de"} 2024-06-13T04:20:33.843Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "a0f29b96-0cb9-4efc-9f3d-4fcc746c17e0"} 2024-06-13T04:20:39.067Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c2c1c3f6-0532-49fb-8756-6f4b611b548b"} 2024-06-13T04:20:44.144Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "08a8e439-1eff-4181-bbd1-dc9e8b6dba22"} 2024-06-13T04:20:49.158Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "461d262b-be40-471f-8013-1eaeffd6dc6f"} 2024-06-13T04:20:56.287Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "614e6480-aa52-40ff-8181-2f537dde1b9e"} 2024-06-13T04:20:59.840Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "2f66b25f-eb30-4b92-899e-800008e3aa12"} 2024-06-13T04:21:05.152Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "5d06e271-893c-49cb-b746-c020c0d8d555"} 2024-06-13T04:21:10.721Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "486ccf7c-ee1a-4308-8f7a-45714689625b"} 2024-06-13T04:21:16.719Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "388c68fc-7f65-41fc-9c18-557dc8c49a04"} 2024-06-13T04:21:22.022Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "15964862-4a50-4df4-ba27-d9b8d596d9a3"} 2024-06-13T04:21:27.244Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "0b3166f6-2379-45c1-b1c8-f2583405e18b"} 2024-06-13T04:21:32.631Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "052bd760-4b9f-4d99-9cdf-0a516cb83e6a"} 2024-06-13T04:21:37.875Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": 
"98731fdd-727f-45eb-aff3-753e92051112"} 2024-06-13T04:21:39.495Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c69d333c-a2ea-4189-a465-74eb8938d0be", "user": "root"} 2024-06-13T04:21:39.536Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c69d333c-a2ea-4189-a465-74eb8938d0be", "user": "root"} 2024-06-13T04:21:39.544Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c69d333c-a2ea-4189-a465-74eb8938d0be", "secret": "some-name-mysql-init", "user": "root"} 2024-06-13T04:21:45.112Z DEBUG PXC users synced with ProxySQL {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c69d333c-a2ea-4189-a465-74eb8938d0be"} 2024-06-13T04:21:45.122Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c69d333c-a2ea-4189-a465-74eb8938d0be", "user": "root"} 2024-06-13T04:21:45.122Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c69d333c-a2ea-4189-a465-74eb8938d0be", "user": "monitor"} 2024-06-13T04:21:45.160Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c69d333c-a2ea-4189-a465-74eb8938d0be", "user": "monitor"} 2024-06-13T04:21:45.171Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c69d333c-a2ea-4189-a465-74eb8938d0be", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-13T04:21:45.220Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c69d333c-a2ea-4189-a465-74eb8938d0be", "user": "monitor"} 2024-06-13T04:21:45.230Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c69d333c-a2ea-4189-a465-74eb8938d0be", "user": "monitor"} 2024-06-13T04:21:45.230Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c69d333c-a2ea-4189-a465-74eb8938d0be", "user": "xtrabackup"} 2024-06-13T04:21:45.254Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c69d333c-a2ea-4189-a465-74eb8938d0be", "user": "xtrabackup"} 2024-06-13T04:21:45.265Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c69d333c-a2ea-4189-a465-74eb8938d0be", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-13T04:21:45.282Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c69d333c-a2ea-4189-a465-74eb8938d0be", "user": "xtrabackup"} 2024-06-13T04:21:45.282Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c69d333c-a2ea-4189-a465-74eb8938d0be", "user": "proxyadmin"} 2024-06-13T04:21:45.331Z INFO Proxy user updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c69d333c-a2ea-4189-a465-74eb8938d0be", "user": "proxyadmin"} 2024-06-13T04:21:45.344Z INFO 
Internal secrets updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c69d333c-a2ea-4189-a465-74eb8938d0be", "user": "proxyadmin"} 2024-06-13T04:21:45.344Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c69d333c-a2ea-4189-a465-74eb8938d0be", "last-applied-secret": "4df13f95525d15041200f7469a7f956ef5b8b48263369748922999fd91bc0d75"} 2024-06-13T04:21:45.344Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c69d333c-a2ea-4189-a465-74eb8938d0be", "last-applied-secret": "4df13f95525d15041200f7469a7f956ef5b8b48263369748922999fd91bc0d75"} 2024-06-13T04:21:45.592Z ERROR sync users {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "c69d333c-a2ea-4189-a465-74eb8938d0be", "error": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n", "errorVerbose": "exec syncusers: command terminated with exit code 1 / / ERROR 1045 (28000): ProxySQL Error: Access denied for user 'proxyadmin'@'127.0.0.1' (using password: YES)\nERROR (line:335) : ProxySQL connection check failed. \n-- Could not connect to ProxySQL at localhost:6032 \n-- Please check the ProxySQL connection parameters and status.\n\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).syncPXCUsersWithProxySQL\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/users.go:920\ngithub.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1\n\t/go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1246\nruntime.goexit\n\t/usr/local/go/src/runtime/asm_amd64.s:1695"} 2024-06-13T04:23:53.707Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "d3514700-90f2-4f9d-ba3a-174d0ead809b", "err": "failed to connect to pod some-name-pxc-0: dial tcp: lookup some-name-pxc-0.some-name-pxc.users-14491 on 10.169.64.10:53: no such host"} 2024-06-13T04:23:58.917Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "fe1f7578-e2fa-4bf5-b763-120fe2f2b5f5", "primary name": "some-name-pxc-0.some-name-pxc.users-14491.svc.cluster.local"} 2024-06-13T04:24:04.192Z INFO Unable to find primary pod for replication. No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "d691beb4-89df-4fed-8778-05d7ef7c33b0", "primary name": "some-name-pxc-0.some-name-pxc.users-14491.svc.cluster.local"} 2024-06-13T04:24:09.531Z INFO Unable to find primary pod for replication. 
No pod with name or ip like this {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "3a41df0b-3c1a-4458-b654-f20b3a804988", "primary name": "some-name-pxc-0.some-name-pxc.users-14491.svc.cluster.local"} 2024-06-13T04:24:11.097Z INFO KubeAPIWarningLogger .metadata.ownerReferences contains duplicate entries; API server dedups owner references in 1.20+, and may reject such requests as early as 1.24; please fix your requests; duplicate UID(s) observed: 890df3f6-1d41-49cd-823e-579a8fc06e9d 2024-06-13T04:24:14.339Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "7616e26d-c282-4ae9-9ed7-862cbe793c7d", "err": "get primary pxc pod: failed to get proxy connection: dial tcp 10.169.79.107:3306: connect: connection refused"} 2024-06-13T04:26:52.344Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "e3ea3f4c-f9ba-4726-b1a7-bd945a946b55", "user": "root"} 2024-06-13T04:26:52.382Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "e3ea3f4c-f9ba-4726-b1a7-bd945a946b55", "user": "root"} 2024-06-13T04:26:52.395Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "e3ea3f4c-f9ba-4726-b1a7-bd945a946b55", "secret": "some-name-mysql-init", "user": "root"} 2024-06-13T04:26:52.412Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "e3ea3f4c-f9ba-4726-b1a7-bd945a946b55", "user": "root"} 2024-06-13T04:26:52.412Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "e3ea3f4c-f9ba-4726-b1a7-bd945a946b55", "user": "operator"} 2024-06-13T04:26:52.438Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "e3ea3f4c-f9ba-4726-b1a7-bd945a946b55", "user": "operator"} 2024-06-13T04:26:52.459Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "e3ea3f4c-f9ba-4726-b1a7-bd945a946b55", "secret": "some-name-mysql-init", "user": "operator"} 2024-06-13T04:26:52.475Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "e3ea3f4c-f9ba-4726-b1a7-bd945a946b55", "user": "operator"} 2024-06-13T04:26:52.475Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "e3ea3f4c-f9ba-4726-b1a7-bd945a946b55", "user": "monitor"} 2024-06-13T04:26:52.503Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "e3ea3f4c-f9ba-4726-b1a7-bd945a946b55", "user": "monitor"} 2024-06-13T04:26:52.521Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "e3ea3f4c-f9ba-4726-b1a7-bd945a946b55", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-13T04:26:52.532Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "e3ea3f4c-f9ba-4726-b1a7-bd945a946b55", "user": "monitor"} 2024-06-13T04:26:52.532Z INFO Password changed, updating user 
{"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "e3ea3f4c-f9ba-4726-b1a7-bd945a946b55", "user": "xtrabackup"} 2024-06-13T04:26:52.557Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "e3ea3f4c-f9ba-4726-b1a7-bd945a946b55", "user": "xtrabackup"} 2024-06-13T04:26:52.572Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "e3ea3f4c-f9ba-4726-b1a7-bd945a946b55", "secret": "some-name-mysql-init", "user": "xtrabackup"} 2024-06-13T04:26:52.588Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "e3ea3f4c-f9ba-4726-b1a7-bd945a946b55", "user": "xtrabackup"} 2024-06-13T04:26:52.588Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "e3ea3f4c-f9ba-4726-b1a7-bd945a946b55", "user": "replication"} 2024-06-13T04:26:52.612Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "e3ea3f4c-f9ba-4726-b1a7-bd945a946b55", "user": "replication"} 2024-06-13T04:26:52.628Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "e3ea3f4c-f9ba-4726-b1a7-bd945a946b55", "secret": "some-name-mysql-init", "user": "replication"} 2024-06-13T04:26:52.646Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "e3ea3f4c-f9ba-4726-b1a7-bd945a946b55", "user": "replication"} 2024-06-13T04:26:52.646Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "e3ea3f4c-f9ba-4726-b1a7-bd945a946b55", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-06-13T04:26:52.646Z INFO PXC pods will be restarted {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "e3ea3f4c-f9ba-4726-b1a7-bd945a946b55", "last-applied-secret": "6542c6f8a2375ddfcda319bf91c99ca69519730971c577cd07366e356671ee68"} 2024-06-13T04:29:30.509Z INFO Password changed, updating user {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "5c7b8100-9dc1-44ad-8ad3-e5c6a6a1938c", "user": "monitor"} 2024-06-13T04:29:30.534Z INFO User password updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "5c7b8100-9dc1-44ad-8ad3-e5c6a6a1938c", "user": "monitor"} 2024-06-13T04:29:30.544Z INFO MySQL init secret updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "5c7b8100-9dc1-44ad-8ad3-e5c6a6a1938c", "secret": "some-name-mysql-init", "user": "monitor"} 2024-06-13T04:29:30.554Z INFO Internal secrets updated {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "5c7b8100-9dc1-44ad-8ad3-e5c6a6a1938c", "user": "monitor"} 2024-06-13T04:29:30.555Z INFO Proxy pods will be restarted {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "5c7b8100-9dc1-44ad-8ad3-e5c6a6a1938c", "last-applied-secret": "81935ecf30ae523a6ee69582354f9488d47dbeb0f8c7c572af96870f2024284d"} 2024-06-13T04:29:44.338Z INFO reconcile replication error {"controller": 
"pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "89ec7721-493c-4d74-9092-6d60df06235f", "err": "get primary pxc pod: failed to get proxy connection: driver: bad connection"} 2024-06-13T04:29:47.585Z INFO reconcile replication error {"controller": "pxc-controller", "namespace": "users-14491", "name": "some-name", "reconcileID": "dcf234a3-9122-4041-91d8-5eacd8f5d57b", "err": "get primary pxc pod: failed to get proxy connection: driver: bad connection"} github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc.(*ReconcilePerconaXtraDBCluster).resyncPXCUsersWithProxySQL.func1 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:222 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:261 /go/pkg/mod/sigs.k8s.io/controller-runtime@v0.18.0/pkg/internal/controller/controller.go:324 /go/src/github.com/percona/percona-xtradb-cluster-operator/pkg/controller/pxc/controller.go:1248 [mysql] 2024/06/13 04:29:41 packets.go:37: unexpected EOF [mysql] 2024/06/13 04:29:42 packets.go:37: unexpected EOF [mysql] 2024/06/13 04:29:43 packets.go:37: unexpected EOF [mysql] 2024/06/13 04:29:44 packets.go:37: unexpected EOF [mysql] 2024/06/13 04:29:45 packets.go:37: unexpected EOF [mysql] 2024/06/13 04:29:46 packets.go:37: unexpected EOF [mysql] 2024/06/13 04:29:47 packets.go:37: unexpected EOF sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).processNextWorkItem sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).reconcileHandler sigs.k8s.io/controller-runtime/pkg/internal/controller.(*Controller).Start.func2.2 + kubectl get pxc --all-namespaces -o wide + grep -v NAMESPACE + xargs -L 1 sh -xc 'kubectl patch pxc -n $0 $1 --type=merge -p "{\"metadata\":{\"finalizers\":[]}}"' + kubectl patch pxc -n users-14491 some-name --type=merge -p '{"metadata":{"finalizers":[]}}' perconaxtradbcluster.pxc.percona.com/some-name patched + kubectl_bin delete pxc --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.Qv88QDtcAf ++ mktemp + local LAST_ERR=/tmp/tmp.0DR3yLkXoV + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.Qv88QDtcAf perconaxtradbcluster.pxc.percona.com "some-name" deleted + cat /tmp/tmp.0DR3yLkXoV + rm /tmp/tmp.Qv88QDtcAf /tmp/tmp.0DR3yLkXoV + return 0 + kubectl_bin delete pxc-backup --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.pSnyjA2puN ++ mktemp + local LAST_ERR=/tmp/tmp.WmDQ6NSD4U + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-backup --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.pSnyjA2puN No resources found + cat /tmp/tmp.WmDQ6NSD4U + rm /tmp/tmp.pSnyjA2puN /tmp/tmp.WmDQ6NSD4U + return 0 + kubectl_bin delete pxc-restore --all --all-namespaces ++ mktemp + local LAST_OUT=/tmp/tmp.XBH3RfOa0i ++ mktemp + local LAST_ERR=/tmp/tmp.NeiASgP71k + local exit_status=0 ++ seq 0 2 + for i in '$(seq 0 2)' + set +e + kubectl delete pxc-restore --all --all-namespaces + exit_status=0 + set -e + '[' 0 '!=' 0 ']' + break + cat /tmp/tmp.XBH3RfOa0i No resources found + cat /tmp/tmp.NeiASgP71k + rm /tmp/tmp.XBH3RfOa0i /tmp/tmp.NeiASgP71k + return 0 + kubectl_bin delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook ++ mktemp + local LAST_OUT=/tmp/tmp.SL3HxhAtSr ++ mktemp + local LAST_ERR=/tmp/tmp.fFtXTKS09A + local exit_status=0 ++ seq 0 2 + for 
+ set +e
+ kubectl delete ValidatingWebhookConfiguration percona-xtradbcluster-webhook
+ exit_status=0
+ set -e
+ '[' 0 '!=' 0 ']'
+ break
+ cat /tmp/tmp.SL3HxhAtSr
validatingwebhookconfiguration.admissionregistration.k8s.io "percona-xtradbcluster-webhook" deleted
+ cat /tmp/tmp.fFtXTKS09A
+ rm /tmp/tmp.SL3HxhAtSr /tmp/tmp.fFtXTKS09A
+ return 0
+ kubectl_bin delete -f https://github.com/jetstack/cert-manager/releases/download/v1.14.2/cert-manager.yaml
+ :
+ '[' '!' -z '' ']'
+ '[' -n pxc-operator ']'
+ kubectl_bin delete --grace-period=0 --force=true namespace users-14491
+ rm -rf /tmp/tmp.wXQqG5rnGC
+ kubectl_bin delete --grace-period=0 --force=true namespace pxc-operator
++ mktemp
++ mktemp
+ local LAST_OUT=/tmp/tmp.ASAQzUtnkS
+ desc 'test passed'
+ set +o xtrace
-----------------------------------------------------------------------------------
test passed
-----------------------------------------------------------------------------------
+ local LAST_OUT=/tmp/tmp.qdqPAQJyqB
++ mktemp
++ mktemp
+ local LAST_ERR=/tmp/tmp.Y8IiHWdy0f
+ local exit_status=0
+ local LAST_ERR=/tmp/tmp.cpfWUmVExU
++ seq 0 2
+ local exit_status=0
++ seq 0 2
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace users-14491
+ for i in '$(seq 0 2)'
+ set +e
+ kubectl delete --grace-period=0 --force=true namespace pxc-operator
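Note on the trace above: every kubectl_bin call expands to the same boilerplate — two mktemp files, a 'seq 0 2' retry loop with set +e/set -e around the raw kubectl, a break on success, then cat and rm of the captured output. A minimal sketch of such a wrapper, reconstructed from the trace rather than copied from the suite's helper functions:

# Sketch of a retry wrapper matching the trace (reconstructed, not the
# suite's actual implementation): run kubectl up to three times,
# capturing stdout/stderr to temp files, and stop on the first success.
kubectl_bin() {
	local LAST_OUT LAST_ERR
	local exit_status=0
	LAST_OUT=$(mktemp)
	LAST_ERR=$(mktemp)
	for i in $(seq 0 2); do
		set +e
		kubectl "$@" >"$LAST_OUT" 2>"$LAST_ERR"
		exit_status=$?
		set -e
		if [ "$exit_status" != 0 ]; then
			sleep 1   # ASSUMPTION: the real wrapper may wait differently
			continue
		fi
		break
	done
	cat "$LAST_OUT"
	cat "$LAST_ERR" >&2
	rm "$LAST_OUT" "$LAST_ERR"
	return "$exit_status"
}

Capturing output to temp files is what lets the wrapper replay the command's stdout and stderr verbatim after the loop settles on a final exit status, which is why the trace always shows the two cat calls before rm and return.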